query (stringlengths 7-9.55k) | document (stringlengths 10-363k) | metadata (dict) | negatives (sequencelengths 0-101) | negative_scores (sequencelengths 0-101) | document_score (stringlengths 3-10) | document_rank (stringclasses, 102 values) |
---|---|---|---|---|---|---|
end test Test that the graph class will not let you to_s on an incomplete graph | def test_graph_to_s_incomplete
  sut_graph = Graph.new
  sut_graph.name="test_graph"
  #sut_graph.type=:digraph
  sut_graph.node_style=:ellipse
  sut_graph.add_edge("TEST1" , "TEST2" , "take_me_to_test_2")
  assert_raises RuntimeError do
    returned_obj = sut_graph.to_s
  end # end assert
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_to_s_empty_graph\n graph = Graph.new\n\n assert(graph.to_s == '')\n end",
"def test_to_s\n graph = DirectedGraph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('a','b').add_edge('c','b')\n\n assert(graph.to_s == 'a=>b,b=>,c=>b')\n end",
"def test_to_s_only_vertices\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n\n assert(graph.to_s == 'a=>,b=>,c=>')\n end",
"def test_to_s\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('a','b').add_edge('c','b')\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end",
"def test_graph_to_s\n sut_graph = Graph.new\n sut_graph.name=\"test_graph\" \n sut_graph.type=:digraph\n sut_graph.node_style=:ellipse\n #sut_graph.add_node \"TEST1\"\n #sut_graph.add_node \"TEST2\"\n sut_graph.add_edge(\"TEST1\" , \"TEST2\" , \"take_me_to_test_2\")\n \n \n returned_obj = sut_graph.to_s\n assert( returned_obj.instance_of?(String) , \"Check to_s returns String, returns: #{returned_obj.class}\" )\n assert(returned_obj.scan(/test_graph/).length==1 , \"Check once occurence of graph name in dot to_s.\")\n assert(returned_obj.scan(/digraph test_graph/).length==1 , \"Check graph type and name in dot to_s.\") \n assert(returned_obj.scan(/shape = ellipse/).length==1 , \"Check graph node style in dot to_s.\") \n #assert(returned_obj.scan(/TEST1\\;/).length==1 , \"Check that Node definition is included: TEST1;\")\n #assert(returned_obj.scan(/TEST2\\;/).length==1 , \"Check that Node definition is included: TEST2}\")\n assert(returned_obj.scan(/label = \\\"take_me_to_test_2\"/).length==1 , \"Check that arc label is included\")\n \n end",
"def test_build_from_hash\n graph = DirectedGraph.new\n graph.build({'a'=>nil,'b'=>'c','c'=>nil})\n\n assert(graph.to_s == 'a=>,b=>c,c=>')\n end",
"def test_build_from_string2\n graph = Graph.new\n graph.build('a=>b')\n\n assert(graph.to_s == 'a=>b,b=>a')\n end",
"def test_build_from_string\n graph = DirectedGraph.new\n graph.build('a=>,b=>c,c=>')\n\n assert(graph.to_s == 'a=>,b=>c,c=>')\n end",
"def test_print_empty\n assert_output(\"Empty graph!\\n\") { @graph.print }\n end",
"def test_new_graph_not_nil\n refute_nil(@graph)\n assert_kind_of(Graph, @graph)\n end",
"def test_build_from_hash\n graph = Graph.new\n graph.build({'a'=>'b','c'=>'b'})\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end",
"def test_build_from_string3\n graph = Graph.new\n graph.build('a=>,c=>b')\n\n assert(graph.to_s == 'a=>,c=>b,b=>c')\n end",
"def test_has_edges_when_empty\n graph = Graph.new\n\n assert(graph.has_edges() == false)\n end",
"def to_dot (params={}) to_dot_graph(params).to_s; end",
"def to_dot (params={}) to_dot_graph(params).to_s; end",
"def graph_to_s\n string = \"\"\n @nodes.each do |name, node|\n string +=\"#{name}:\\n\\t(#{node.name}, #{node.country}, #{node.continent} #{node.timezone}, #{node.coords}, #{node.pop}, #{node.region}) => #{@edges[name]} \\n\"\n end\n string\n end",
"def test_build_from_string\n graph = Graph.new\n graph.build('a=>b,c=>b')\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end",
"def generate_graph\n end",
"def test_has_edge_no_edge\n assert(@graph.has_edge('b', 'd') == false)\n end",
"def to_s\n result = ''\n\n # Return an empty string for an empty graph\n return result unless @vertices.length > 0\n\n @vertices.each do |vertex|\n added = false\n vertex.neighbours.each_with_index do |value, neighbour_index|\n if (value == true)\n added = true\n result << vertex.name << '=>' << @vertices[neighbour_index].name << ','\n end\n end\n # if there has been no edges for the vertex\n result << vertex.name << '=>,' unless added\n end\n\n # remove trailing comma\n result.chop\n end",
"def test_add_edge_no_vertices\n graph = Graph.new\n\n exception = assert_raises GraphError do\n graph.add_edge('a','b')\n end\n\n assert_equal('No edges can be added to an empty graph', exception.message)\n end",
"def test_print\r\n reader = GraphReader.new('whatever')\r\n reader.add_node(0, 'A', nil)\r\n assert_output(\"\\nNumber: 0\\nLetter: A\\nNeighbors: none\\n\") {reader.print_graph()}\r\n end",
"def test_create_graph\r\n \tnode1 = Node.new(1, 'C', [2, 3])\r\n \tnode2 = Node.new(2, 'A', [3, 4, 6])\r\n \tnode3 = Node.new(3, 'K', [5])\r\n \tnode4 = Node.new(4, 'T', nil)\r\n \tnode5 = Node.new(5, 'E', nil)\r\n \tnode6 = Node.new(6, 'B', nil)\r\n \tsmall_graph = [node1, node2, node3, node4, node5, node6]\r\n \treader = GraphReader.new('small_graph.txt')\r\n \treader.create_graph()\r\n \treader.graph.shift\r\n \tassert_equal reader.graph.map(&:letter), small_graph.map(&:letter)\r\n end",
"def test_build_from_string_invalid2\n graph = Graph.new\n\n exception = assert_raises ArgumentError do\n graph.build('a=>b,=>b')\n end\n\n assert_equal('String representation of the graph is invalid', exception.message)\n end",
"def test_build_from_string_invalid\n graph = Graph.new\n\n exception = assert_raises ArgumentError do\n graph.build('a=>c=>b')\n end\n\n assert_equal('String representation of the graph is invalid', exception.message)\n end",
"def down(graph)\n raise 'Abstract'\n end",
"def test_has_edges_vertices_only\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph = Graph.new\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n\n assert(graph.has_edges() == false)\n end",
"def test_has_edge_no_vertices\n assert(@graph.has_edge('vertex1', 'vertex2') == false)\n end",
"def inspect; to_ast.inspect end",
"def to_s; nil; end",
"def test_get_node_invalid\r\n reader = GraphReader.new('small_graph.txt')\r\n reader.create_graph()\r\n assert_nil reader.get_node(-1) \r\n end",
"def test_make_graph\n\t\ttest_main = Main.new(3, 4, 6)\n\t\ttest_graph = Graph.new(10)\n\t\ttest_graph = test_main.make_graph(test_graph)\n\t\tassert_equal test_graph.get_loc(\"Matzburg\").paths.length, 4\n\tend",
"def test_graph_edges\n test_g = Graph.new\n test_g.add_edge(\"a\", \"b\")\n puts \"assert adding edge to a graph adds its nodes too\"\n assert (test_g.contains(\"a\"))\n\n puts \"assert that edges are directed, ie not symmetric relation\"\n assert test_g.has_edge(\"a\",\"b\")\n assert !(test_g.has_edge(\"b\",\"a\"))\n end",
"def to_s\n res = \"\"\n @graph.each_pair do |key, value|\n res += key.to_s + \": \" + value.join(', ') + \"\\n\"\n end\n res\n end",
"def test_specific_info\n \n # Start a new graph with no vertices\n graph = Graph.new()\n \n # Add 2 vertices to the graph\n origin = { \"code\" => \"NYC\" ,\n \"name\" => \"New York\" ,\n \"country\" => \"US\" ,\n \"continent\" => \"North America\" ,\n \"timezone\" => -5 ,\n \"coordinates\" => { \"N\" => 41, \"W\" => 74 } ,\n \"population\" => 22200000 ,\n \"region\" => 3 }\n \n destination = { \"code\" => \"WAS\" ,\n \"name\" => \"Washington\" ,\n \"country\" => \"US\" ,\n \"continent\" => \"North America\" ,\n \"timezone\" => -5 ,\n \"coordinates\" => {\"N\" => 39, \"W\" => 77} ,\n \"population\" => 8250000 ,\n \"region\" => 3 } \n graph.add_node(origin)\n graph.add_node(destination)\n \n assert_equal(graph.get_specific_info(\"CHI\"), \"City doesn't exist in the graph.\")\n \n # Get information on Washington and check that correct information is\n # contained in the return value\n info = graph.get_specific_info(\"WAS\")\n \n assert_equal( info.include?(\"WAS\"), true )\n assert_equal( info.include?(\"Washington\"), true )\n assert_equal( info.include?(\"US\"), true )\n assert_equal( info.include?(\"North America\"), true )\n assert_equal( info.include?(\"N 39, W 77\"), true )\n assert_equal( info.include?(\"Population: 8250000\"), true )\n assert_equal( info.include?(\"Region: 3\"), true )\n assert_equal( info.include?(\"Direct Connections: WAS\"), false )\n \n # Add an edge and check if that information is reflected in the return value\n graph.add_edge(\"NYC\",\"WAS\" ,570)\n info = graph.get_specific_info(\"NYC\")\n \n assert_equal( info.include?(\"Direct Connections: WAS - 570\"), true)\n\n end",
"def to_graph(passed_graph = RDF::Graph.new)\n passed_graph << graph\n end",
"def strict(name, options = {}, &block)\n GraphViz::DSL.new(name, options.merge( { :type => \"strict digraph\" } ), &block).graph\nend",
"def test_07\n @dg = DiGraph.new([0,0],[6,0],[6,8],[2,6],[8,8],[3,4],[3,2],[3,9],[9,4],[9,6],[4,3],[4,8])\n @paths = Hash.new\n @paths[0] = [0]\n @paths[6] = [0,8]\n @paths[2] = [6]\n @paths[8] = [8]\n @paths[3] = [4,2,9]\n @paths[9] = [4,6]\n @paths[4] = [3,8]\n @nodes = @paths.keys\n received_dg = @dg.strongly_connected_component_including_node(6);\n filled_dg = DiGraph.new(*fill(6));\n if (not filled_dg.equal?(received_dg))\n puts \"test_07 failed...\"\n puts \"DiGraph => #{@dg.to_s}\"\n puts \"node => 6\"\n puts \"expected => #{filled_dg.to_s}\"\n puts \"received => #{received_dg.to_s}\"\n end\n assert_equal(true,filled_dg.equal?(received_dg))\n end",
"def test_add_node_invalid\r\n reader = GraphReader.new('whatever')\r\n assert_nil reader.add_node(nil, nil, [])\r\n end",
"def up(graph)\n raise 'Abstract'\n end",
"def graph?\n false\n end",
"def test_get_node_valid\r\n reader = GraphReader.new('small_graph.txt')\r\n reader.create_graph()\r\n assert_equal reader.graph[1], reader.get_node(1)\r\n end",
"def inspect; to_s; end",
"def inspect; to_s; end",
"def inspect; to_s; end",
"def inspect; to_s; end",
"def inspect; to_s; end",
"def test_05\n @dg = DiGraph.new([0,7],[1,9],[1,4],[7,4],[7,0],[7,9],[3,7],[9,4],[9,7],[9,9],[4,1],[4,4],[4,7])\n @paths = Hash.new\n @paths[0] = [7]\n @paths[1] = [9,4]\n @paths[7] = [4,0,9]\n @paths[3] = [7]\n @paths[9] = [4,7,9]\n @paths[4] = [1,4,7]\n @nodes = @paths.keys\n received_dg = @dg.strongly_connected_component_including_node(0);\n filled_dg = DiGraph.new(*fill(0));\n if (not filled_dg.equal?(received_dg))\n puts \"test_05 failed...\"\n puts \"DiGraph => #{@dg.to_s}\"\n puts \"node => 0\"\n puts \"expected => #{filled_dg.to_s}\"\n puts \"received => #{received_dg.to_s}\"\n end\n assert_equal(true,filled_dg.equal?(received_dg))\n end",
"def test_has_node_dummy_with_obj\n nonexistent_node = Node.new(1, [2])\n refute @graph.node?(nonexistent_node)\n end",
"def test_adding_node\n \n # Start a new graph with no nodes\n graph = Graph.new()\n \n new_vertex = { \"code\" => \"NYC\" ,\n \"name\" => \"New York\" ,\n \"country\" => \"US\" ,\n \"continent\" => \"North America\" ,\n \"timezone\" => -5 ,\n \"coordinates\" => { \"N\" => 41, \"W\" => 74 } ,\n \"population\" => 22200000 ,\n \"region\" => 3 }\n graph.add_node(new_vertex)\n \n assert_equal( graph.nodes().size, 1)\n assert_equal( graph.nodes().has_key?(\"NYC\"), true )\n assert_not_nil( graph.nodes[\"NYC\"] )\n assert_equal( graph.nodes[\"NYC\"].region, 3)\n assert_equal( graph.nodes[\"NYC\"].coordinates[\"N\"], 41)\n\n # Test adding illegal node\n bad_vertex = { \"useless\" => \"information\"}\n\n caught = false\n begin\n graph.add_node(bad_vertex)\n rescue\n caught = true\n end\n\n assert_equal( caught, true )\n\n missing_keys_vertex = { \"code\" => \"A\"}\n\n caught = false\n begin\n graph.add_node(bad_vertex)\n rescue\n caught = true\n end \n \n assert_equal( caught, true )\n\n end",
"def fill_save_graph( graph )\n raise \"#{self.to_s}.fill_save_graph should fill the supplied graph with the data from the object.\"\n end",
"def test_complex_acyclic_graph_acyclic_and_cyclic\n g = Graph.new\n a = Node.new('a')\n b = Node.new('b')\n c = Node.new('c')\n d = Node.new('d')\n e = Node.new('e')\n g.add_node(a)\n g.add_node(b)\n g.add_node(c)\n g.add_node(d)\n g.add_node(e)\n a.add_edge(Edge.new(a, b))\n a.add_edge(Edge.new(a, c))\n b.add_edge(Edge.new(b, c))\n b.add_edge(Edge.new(b, d))\n c.add_edge(Edge.new(c, d))\n c.add_edge(Edge.new(c, e))\n e.add_edge(Edge.new(e, d))\n assert_equal true, g.acyclic?\n assert_equal false, g.cyclic?\n end",
"def initialize(graph)\n @graph = graph\n end",
"def test_06\n @dg = DiGraph.new([5,9],[0,3],[3,8],[8,9],[9,0])\n @paths = Hash.new\n @paths[5] = [9]\n @paths[0] = [3]\n @paths[3] = [8]\n @paths[8] = [9]\n @paths[9] = [0]\n @nodes = @paths.keys\n received_dg = @dg.strongly_connected_component_including_node(0);\n filled_dg = DiGraph.new(*fill(0));\n if (not filled_dg.equal?(received_dg))\n puts \"test_06 failed...\"\n puts \"DiGraph => #{@dg.to_s}\"\n puts \"node => 0\"\n puts \"expected => #{filled_dg.to_s}\"\n puts \"received => #{received_dg.to_s}\"\n end\n assert_equal(true,filled_dg.equal?(received_dg))\n end",
"def test_check_if_vertex_is_source_when_it_doesnt_exist\n assert(@dgraph.check_if_vertex_is_source('no_vertex') == false)\n end",
"def test_complex_cyclic_graph_acyclic_and_cyclic\n g = Graph.new\n a = Node.new('a')\n b = Node.new('b')\n c = Node.new('c')\n d = Node.new('d')\n e = Node.new('e')\n f = Node.new('f')\n g.add_node(a)\n g.add_node(b)\n g.add_node(c)\n g.add_node(d)\n g.add_node(e)\n g.add_node(f)\n a.add_edge(Edge.new(a, b))\n a.add_edge(Edge.new(a, c))\n b.add_edge(Edge.new(b, c))\n b.add_edge(Edge.new(b, d))\n c.add_edge(Edge.new(c, d))\n c.add_edge(Edge.new(c, e))\n e.add_edge(Edge.new(e, d))\n d.add_edge(Edge.new(d, f))\n f.add_edge(Edge.new(f, e))\n assert_equal false, g.acyclic?\n assert_equal true, g.cyclic?\n end",
"def test_has_edges\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('b','c')\n\n assert(graph.has_edges() == true)\n end",
"def dump_list\n list = \"\"\n @graph.each do |from, hash|\n\tlist << \"#{from} => \"\n\ta = []\n\thash.each do |to, relation|\n\t a.push(\"#{to} (#{relation})\")\n\tend\n\tlist << a.join(\", \") + \"\\n\"\n end\n list\n end",
"def to_s\n raise NotImplementedError\n end",
"def graph?\n true\n end",
"def to_s\n if ((@p).equal?(-1))\n fill_buffer\n end\n buf = StringBuffer.new\n i = 0\n while i < @nodes.size\n t = @nodes.get(i)\n buf.append(\" \")\n buf.append(@adaptor.get_type(t))\n i += 1\n end\n return buf.to_s\n end",
"def to_s\n not_implemented\n end",
"def graph_name\n nil\n end",
"def to_s\n\t\traise \"to_s: Not Implemented\"\n\tend",
"def force_convert_to_tnetstring\n ::TNetstring.dump(self.to_hash)\n end",
"def graph\n \n \n @graphml = \"<graphml><key id='label' for='all' attr.name='label' attr.type='string'/><key id='link' for='all' attr.name='link' attr.type='string'/><key id='weight' for='all' attr.name='weight' attr.type='double'/><key id='edgetype' for='edge' attr.name='edgetype' attr.type='string'/><key id='nodetype' for='node' attr.name='nodetype' attr.type='string'/><graph edgedefault='directed'>\" \n @sif = ''\n \n @max = 2\n @log = Array.new\n @additional = {'cleavage' => {}, 'inverse_cleavage' => {}, 'inhibition' => {}, 'inverse_inhibition' => {}, 'interaction' => {}}\n add_nodes(nil,self.protein,nil,0,nil)\n add_additional_nodes(@additional['cleavage'],'cleavage')\n add_additional_nodes(@additional['inverse_cleavage'],'inverse_cleavage')\n add_additional_nodes(@additional['inhibition'],'inhibition')\n add_additional_nodes(@additional['inverse_inhibition'],'inverse_inhibition')\n \n @graphml << \"</graph></graphml>\"\n\n \n # f = File.open(\"#{RAILS_ROOT}/public/images/dynamic/#{self.protein.name}-#{'ppi' if ppi}network.graphml\", 'w')\n # f << @graphml\n # f.close\n\n return @graphml\n end",
"def to_s\n ret = build_link\n ret << tree.to_s if tree\n\n ret\n end",
"def graph\n @g ||= GraphViz.new(:G, :type => :digraph)\n end",
"def to_s\n s = \"\"\n @nodes.each do |identifier, node|\n s += \"#{identifier} (#{node.contigs.join(\",\")}) => #{@edges[identifier]} \\n\"\n end\n s\n end",
"def test_add_node_double\n node = Node.new(1, [1])\n @graph.add_node(node)\n # Assert\n assert_equal(1, @graph.num_nodes)\n end",
"def test_has_edge\n @graph.add_edge('a', 'b');\n\n assert(@graph.has_edge('a', 'b') == true)\n end",
"def to_s\n # Ideally, SymbolicValues shouldn't escape into trace world, and\n # we shouldn't require this if condition. However, they do escape.\n # This is because of the symbolic values being stored in concrete\n # hashes and arrays in SymbolicResult#each or SymbolicArray#map.\n if tracer.tracing?\n return self.ast.to_s\n end\n @to_s ||= begin\n to_s_var = TraceAST::Var.new(\"to_s\")\n var = tracer.new_var_for(TraceAST::Dot.new(self.ast,to_s_var))\n # Except for nil and empty string, to_s never\n # returns an empty string.\n SymbolicNonEmptyString.new var\n end\n end",
"def test_formatting_all_edges\n \n test_map = ' { \"metros\" : [ {\n \"code\" : \"LON\" ,\n \"name\" : \"London\" ,\n \"country\" : \"X\" ,\n \"continent\" : \"X\" ,\n \"timezone\" : 1 ,\n \"coordinates\" : {\"N\" : 1, \"E\" : 1} ,\n \"population\" : 1,\n \"region\" : 1\n } , {\n \"code\" : \"PAR\" ,\n \"name\" : \"Paris\" ,\n \"country\" : \"X\" ,\n \"continent\" : \"X\" ,\n \"timezone\" : 1 ,\n \"coordinates\" : {\"N\" : 1, \"E\" : 1} ,\n \"population\" : 1,\n \"region\" : 1\n } , {\n \"code\" : \"LIM\" ,\n \"name\" : \"Lima\" ,\n \"country\" : \"X\" ,\n \"continent\" : \"X\" ,\n \"timezone\" : 1 ,\n \"coordinates\" : {\"N\" : 1, \"E\" : 1} ,\n \"population\" : 1,\n \"region\" : 1\n } , {\n \"code\" : \"MOW\" ,\n \"name\" : \"Moscow\" ,\n \"country\" : \"X\" ,\n \"continent\" : \"X\" ,\n \"timezone\" : 1 ,\n \"coordinates\" : {\"N\" : 1, \"E\" : 1} ,\n \"population\" : 1,\n \"region\" : 1\n } ] ,\n \"routes\" : [ {\n \"ports\" : [\"LON\" , \"PAR\"] ,\n \"distance\" : 2410\n } , {\n \"ports\" : [\"LON\" , \"MOW\"] ,\n \"distance\" : 4323\n } , {\n \"ports\" : [\"LIM\" , \"PAR\"] ,\n \"distance\" : 4323\n } ] } '\n graph = Graph.new()\n graph.parse_json_string(test_map)\n \n result = graph.format_all_edges()\n \n # All edges are parsed as 2 way, one flight is of the format XXX-XXX\n # Also account for commas inbetween and no comma at the end\n correct_length = 3 * 8 * 2 - 1\n assert_equal( result.length(), correct_length)\n \n # Check contents using a regexp\n assert_equal( result.scan(/[A-Z]{3}-[A-Z]{3},?/).size, 6)\n \n # Check if result contains some particular flights\n assert_equal( result.include?(\"PAR-LIM\"), true)\n assert_equal( result.include?(\"LON-MOW\"), true)\n \n end",
"def test_remove_edge_first_vertex_missing\n\n exception = assert_raises GraphError do\n @dgraph.remove_edge('z','a')\n end\n\n assert_equal('Edge removal error. First vertex could not be found', exception.message)\n end",
"def to_s\n\t\t\"<Node:#{@e}>\"\n\tend",
"def test_check_if_vertex_is_source\n @dgraph = DirectedGraph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n vertex_d = Vertex.new('d')\n @dgraph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c).add_vertex(vertex_d)\n @dgraph.add_edge('a', 'd').add_edge('d', 'c')\n\n assert(@dgraph.check_if_vertex_is_source('a') == true && @dgraph.check_if_vertex_is_source('b') == true)\n end",
"def test_add_node\n node = Node.new(1, [2, 3])\n @graph.add_node(node)\n assert_equal(@graph.num_nodes, 1)\n end",
"def test_initialize_objects\n\t# assert_equal @w.nodes, 0\n\t# assert_equal @w.validWordList, 0\n\tassert_kind_of Graph, @w \n\tassert_kind_of Node, @n\n end",
"def to_s\n\t\t@node_string\n\tend",
"def test_remove_edge\n @dgraph.add_edge('a', 'b');\n @dgraph.remove_edge('a','b')\n\n # 0 and 1 are indexes of vertex a and vertex b respectively\n assert(@dgraph.vertices[0].neighbours[1] == nil)\n end",
"def test_parse\n metro = @my_graph.metro\n @my_graph.analysis_data\n assert_equal(metro['BUE'].population, 13300000, 'Reading does not works')\n assert_equal(metro['SHA'].continent, 'Asia', 'Reading does not 2')\n assert_equal(metro['MIL'].destination['ESS'], 681, 'distance does not work')\n assert_equal(metro['MIL'].destination.length, 3, 'data wrong')\n assert_equal(metro['OSA'].country, 'JP', 'data wrong')\n assert_equal(@my_graph.longest_dist, 12051, 'Distance correct')\n assert_equal(@my_graph.shortest_dist, 334, 'population wrong')\n assert_equal(@my_graph.biggest_pop, 34000000, 'Distance wront')\n assert_equal(@my_graph.biggest_city, 'TYO', 'Distance wront')\n assert_equal(@my_graph.avg_pop, 11796143, 'Avg Population')\n assert_equal(@my_graph.get_distance('MEX', 'LIM'), 4231, 'route')\n assert_equal(@my_graph.get_distance('LIM', 'MEX'), 4231, 'route')\n\n #controllers\n assert_equal(get_time_part2('SFO', @my_graph), 1.5, \"layover\")\n assert_equal(get_time_part2('WAS', @my_graph), 1.5, \"layover\")\n assert_equal(get_time_part1(400), 0.5333333333333333, \"first part of time wrong\")\n assert_equal(get_time_part1(500), 1, \"first part of time wrong\")\n assert_equal(get_time_part1(300), 0.46188021535170065, \"first part of time wrong\")\n dist, money, time = check_routes(@my_graph,['NYC','YYZ','WAS'])\n assert_equal(dist,1143,\"distance wrong\")\n assert_equal(money,371.30000000000007,\"money wrong\")\n assert_equal(time,3.6666666666666665,\"time wrong\")\n\n\n #test add routes or nodes\n assert(@my_graph.has_route('MIA','WAS'))\n @my_graph.remove_single_route('MIA','WAS')\n assert(!@my_graph.has_route('MIA','WAS'))\n assert(@my_graph.has_route('WAS','MIA'))\n\n #test editcity\n @my_graph.update_city_info('BUE', 'POPULATION', 333333333333)\n @my_graph.analysis_data\n assert_equal(@my_graph.biggest_pop, 333333333333, 'Population wrong')\n @my_graph.update_city_info('BUE', 'POPULATION', 1)\n @my_graph.analysis_data\n assert_not_equal(@my_graph, 333333333333, 'Population wrong')\n\n #test dijkstra\n setup\n s = calculate_shortest(@my_graph,'YYZ','MIA')\n assert_equal(s[0], 'YYZ', 'Dijk wrong')\n assert_equal(s[1], 'WAS', 'Dijk wrong')\n assert_equal(s[2], 'MIA', 'Dijk wrong')\n end",
"def printGraph\n\t\tp @g.graphSet\n\t\treturn @g.graphSet\n\tend",
"def to_s\n raw = \"Raw: #{@raw}\"\n puts raw\n puts\n father = \"Father: #{@father.raw if @father}\"\n puts father\n puts\n left_sentence = \"Left: #{@left_sentence.raw if @left_sentence}\"\n puts left_sentence\n puts\n operator = \"Operator: #{@operator.value if @operator}\"\n puts operator\n puts\n right_sentence = \"Right: #{@right_sentence.raw if @right_sentence}\"\n puts right_sentence\n puts\n level = \"Level: #{@level}\"\n puts level\n puts\n end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end",
"def to_s() end"
] | [
"0.8240558",
"0.7790349",
"0.77343917",
"0.767811",
"0.7484956",
"0.68562925",
"0.6537316",
"0.65276414",
"0.65032285",
"0.6472538",
"0.6434005",
"0.63676643",
"0.6310323",
"0.6239692",
"0.6239692",
"0.6225078",
"0.61920583",
"0.61881006",
"0.61529404",
"0.611738",
"0.607198",
"0.6047776",
"0.603007",
"0.60252845",
"0.59967494",
"0.5964779",
"0.5942995",
"0.5932019",
"0.59193045",
"0.5877536",
"0.5835084",
"0.58348775",
"0.5793736",
"0.57631963",
"0.575323",
"0.5728799",
"0.5724177",
"0.5710983",
"0.56906277",
"0.5667842",
"0.5650034",
"0.56469524",
"0.56444156",
"0.56444156",
"0.56444156",
"0.56444156",
"0.56444156",
"0.56382585",
"0.56375945",
"0.5626486",
"0.5615553",
"0.561131",
"0.5599562",
"0.55931026",
"0.55848014",
"0.5555821",
"0.5534556",
"0.55202836",
"0.5500941",
"0.5495593",
"0.5481935",
"0.54805994",
"0.54592454",
"0.5455089",
"0.5450128",
"0.54481065",
"0.5447718",
"0.5428483",
"0.5426323",
"0.5425505",
"0.54240847",
"0.5410706",
"0.54063606",
"0.540346",
"0.53964186",
"0.53936404",
"0.53908175",
"0.537627",
"0.53754693",
"0.53710216",
"0.5366544",
"0.5364866",
"0.5363741",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468",
"0.5357468"
] | 0.8398824 | 0 |
flag and close activity | def reported
  @activities = Activity.flagged_or_close.order("created_at desc").
    not_undo.paginate(:per_page=>default_per_page,:page=>params[:page])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def close \n #Self trigger window lost focus, since we are going to home\n focus_changed false, @screen_width, @screen_height\n @showing = false\n @activity.moveTaskToBack(true)\n end",
"def closeeye()\n\t\t@opened_eye = false\n\tend",
"def close; true; end",
"def close; true; end",
"def close\n return true\n end",
"def closed\n update_attribute(:status, false)\n end",
"def finish\n\t\tclose\n\tend",
"def close\n if self.openness > 0\n @duration = self.animationSpeed\n @closing = true \n @opening = false\n @animationType = OPEN\n end\n end",
"def close\n # ..\n end",
"def on_close\n\t\tend",
"def close\n return if closed?\n\n nocbreak\n close_screen\n end",
"def close_app\n abort(\"See you next time!\")\n end",
"def can_close?; end",
"def finish\n\t\t\tclose\n\t\tend",
"def on_close\n\tend",
"def close\n\t\t\tKernel.exit 0\n\t\tend",
"def closing?; end",
"def close_when_complete!\n @options[:close] = true\n end",
"def close\n @closed = true\n end",
"def close() end",
"def close() end",
"def close() end",
"def close() end",
"def close\n @@active_state.destroy unless @@active_state.nil?\n super\n self.close!\n end",
"def close() end",
"def close\n\t\tself.status = 'closed'\n\t\tself.save\n\tend",
"def do_close; end",
"def stop\n close\n end",
"def close_account\n @status = 'closed'\n end",
"def closeeye()\n\t\t@pintar = false\n\tend",
"def close\n if @ajar == false\n raise ArgumentError.new(\"The path you seek is redundant\")\n else\n puts \"Door Closure Completed\"\n return @ajar = false\n end\n end",
"def close_account\n @status = \"closed\"\n end",
"def stop; self.app.stop end",
"def finish\n true\n end",
"def close()\n @closed = true\n end",
"def close_account \n @status = \"closed\" \n end",
"def close_notifications_bar\r\n #exec_adb(\"shell service call statusbar 2\")\r\n adb_exec(\"shell cmd statusbar collapse\")\r\nend",
"def close\n abort('Thanks for playing!')\n end",
"def stop\n close\n end",
"def close_window\n end",
"def close\n\n # nothing to do here.\n end",
"def close!\n end",
"def close_account\n self.status = \"closed\"\n end",
"def close_account\n self.status = \"closed\"\n end",
"def on_stop\n # close any open files, sockets, etc.\n end",
"def action_b\n play_cancel_se\n @running = false\n end",
"def close\r\n pop_game_state\r\n end",
"def finish\r\n #\r\n end",
"def onCancel flag, view\n ## puts \"onCancel called\"\n\t\tself.reset(view)\n\tend",
"def close\n @closed = true\n end",
"def close\n end",
"def close\n end",
"def close\n end",
"def close!\n close(true)\n end",
"def closing; end",
"def closing; end",
"def close_account\n self.status = \"closed\"\n end",
"def finish\n @finish || @break_action\n end",
"def close_app\n @bridge.close_app\n end",
"def onCancel(flag, view)\n self.reset(view)\n end",
"def close\n @closed = true\n @on_close and @on_close.call\n end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close; end",
"def close_event\n \tself.update_attribute(:closed, true)\n end",
"def close_toybox\n p 'close_toybox'\n dismissModalViewControllerAnimated(true, completion: nil) \n end",
"def closed\n @closed = true\n end",
"def closed?; end",
"def closed?; end",
"def closed?; end",
"def closed?; end",
"def close\n end",
"def close\n end",
"def close\n end",
"def close\n end",
"def close\n end",
"def close\n end",
"def close\n end"
] | [
"0.63778806",
"0.5755592",
"0.57464164",
"0.57464164",
"0.57203835",
"0.5671849",
"0.5651762",
"0.5592362",
"0.5582573",
"0.5580761",
"0.5578875",
"0.5577521",
"0.55613935",
"0.5553227",
"0.55531317",
"0.55365914",
"0.54902256",
"0.5468107",
"0.5461585",
"0.5455934",
"0.5455934",
"0.5455934",
"0.5455934",
"0.5453638",
"0.5453445",
"0.54503787",
"0.54444",
"0.54410267",
"0.5420223",
"0.5397372",
"0.5391782",
"0.5390992",
"0.53518707",
"0.534478",
"0.53426534",
"0.53325856",
"0.53231525",
"0.53140783",
"0.53050524",
"0.5285664",
"0.5281864",
"0.52752525",
"0.5269001",
"0.5269001",
"0.5264478",
"0.5246136",
"0.5235672",
"0.52339274",
"0.5229349",
"0.5228165",
"0.52232",
"0.52232",
"0.52232",
"0.52196044",
"0.52169263",
"0.52169263",
"0.5216305",
"0.52115357",
"0.52088594",
"0.5207454",
"0.52037275",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5201057",
"0.5199776",
"0.518721",
"0.5176596",
"0.5170817",
"0.5170817",
"0.5170817",
"0.5170817",
"0.5169542",
"0.5169542",
"0.5169542",
"0.5169542",
"0.5169542",
"0.5169542",
"0.5169542"
] | 0.0 | -1 |
Stubs SOAP requests to a given soap_action. | def stubs(soap_action)
  setup :stubs, soap_action
  self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def dispatch(soap_action)\n response = @request.soap @soap\n Response.new response\n end",
"def setup(soap_action, &block)\n @soap = SOAP.new @wsdl.soap_actions[soap_action]\n @wsse = WSSE.new\n\n yield_parameters &block if block\n\n @soap.namespaces[\"xmlns:wsdl\"] = @wsdl.namespace_uri\n @soap.wsse = @wsse\n end",
"def execute_soap_request(action, args, extra_namespaces)\n original_action_name =\n get_service_registry.get_method_signature(action)[:original_name]\n original_action_name = action if original_action_name.nil?\n response = @client.request(original_action_name) do |soap|\n set_headers(soap, args, extra_namespaces)\n end\n return response\n end",
"def request(action, data = nil)\n session.request(\n Endpoint.new.soap_action_name(self.class, action),\n data\n )\n end",
"def apply_stubs(operation_name, stubs); end",
"def soap(soap)\n @soap = soap\n http.endpoint @soap.endpoint.host, @soap.endpoint.port\n http.use_ssl = @soap.endpoint.ssl?\n\n log_request\n @response = http.start do |h|\n h.request request(:soap) { |request| request.body = @soap.to_xml }\n end\n log_response\n @response\n end",
"def request(action, data = nil)\n session.request(\n Endpoint.new.soap_action_name(entity_class, action),\n data\n )\n end",
"def soap_action(operation)\n operation\n end",
"def stub_actions(*action_names)\n if action_names.count == 1 && action_names.first.is_a?(Array)\n action_names = action_names.first\n end\n action_names.each do |name|\n classname = \"VagrantPlugins::Skytap::Action::#{name}\"\n stub_const(classname, double(classname, call: nil))\n end\n end",
"def expects(expected)\n self.action = expected\n\n Savon.config.hooks.define(:spec_action, :soap_request) do |_, request|\n actual = request.soap.input[1]\n raise ExpectationError, \"expected #{action.inspect} to be called, got: #{actual.inspect}\" unless actual == action\n\n respond_with\n end\n\n self\n end",
"def soap_request(options = {})\n options = { error_response_codes: [404] }.merge options\n\n headers = { 'Content-Type' => CONTENT_TYPES[version] }\n if action = options.delete(:action)\n headers['SOAPAction'] = action\n elsif operation = options.delete(:operation)\n headers['SOAPAction'] = soap_action operation\n end\n\n if version == 1 && headers['SOAPAction'].blank?\n raise 'SOAPAction header value must be provided for SOAP 1.1'\n end\n\n request_options = { format: :xml, headers: headers }\n Response.new(version, perform_request(:post, endpoint, options.merge(request_options)), fault_builder).tap do |response|\n raise response.fault if response.fault?\n raise response.error if response.error?\n end\n end",
"def perform_action_with_macro_stubs #:nodoc:\n @spec.send(:run_action!, run_with_expectations?) if @spec.send(:controller)\n end",
"def _generate_wsdl\n @map = self.class.soap_actions\n @namespace = NAMESPACE\n @name = controller_path.gsub('/', '_')\n\n render :template => 'wash_with_soap/wsdl'\n end",
"def stub_actionnetwork_request(path, method:, body: nil)\n stub_request(method, \"https://actionnetwork.org/api/v2#{path}\")\n .with(body: body, headers: { 'OSDI-API-Token' => api_key })\nend",
"def never\n Savon.config.hooks.reject(:spec_action)\n\n Savon.config.hooks.define(:spec_never, :soap_request) do |_, request|\n actual = request.soap.input[1]\n raise ExpectationError, \"expected #{action.inspect} never to be called, but it was!\" if actual == action\n\n respond_with\n end\n\n self\n end",
"def make_request action, params\r\n hash = to_wsdl(params)\r\n tries = 0\r\n\r\n begin\r\n tries += 1\r\n header = authentication_header unless action == 'AuthenticateUser'\r\n response = client.send(\"#{action}!\") do |soap|\r\n soap.namespace = LL_NAMESPACE + '/'\r\n soap.action = LL_NAMESPACE + '/' + action\r\n soap.input = action\r\n soap.header = header\r\n soap.body = hash\r\n end\r\n if response.http.code != '200'\r\n raise \"SYWR: SOAP #{action} failed\"\r\n else\r\n response\r\n end\r\n rescue => e\r\n if tries < 3\r\n reset_token if e == '(detail) Authenticated token has expired or is invalid'\r\n logger.debug \"SYWR: SOAP #{action} failed - #{e}\"\r\n retry\r\n else\r\n logger.warn \"SYWR: SOAP #{action} completely failed - #{e}\"\r\n @errors << 'failed'\r\n nil\r\n end\r\n end\r\n end",
"def send_action(service, action, param)\r\n param_str = \"\"\r\n param.each do |key, value|\r\n param_str << '<%{key}>%{value}</%{key}>'.%(key: key, value: value)\r\n end\r\n xmldata = ActionXML.%(type: service.type, action: action, arguments: param_str)\r\n req = Net::HTTP::Post.new(uri = URI(@igd_location + service.control_url))\r\n req.content_type = 'text/xml; charset=\"utf-8\"'\r\n req['SOAPAction'] = '\"%{type}#%{action}\"'.%(type: service.type, action: action)\r\n res = Net::HTTP.start(uri.hostname, uri.port) do |http|\r\n http.request(req, xmldata)\r\n end\r\n #res = Net::HTTP.request(req, xmldata)\r\n xml = REXML::Document.new(res.body).root\r\n if(res.is_a?(Net::HTTPSuccess))\r\n xml.each_element('s:Body/u:*') do |element|\r\n return {is_error: false, has_xml: true, xml: element}\r\n end\r\n else\r\n return {is_error: true, \r\n faultcode: xml.get_text('s:Body/s:Fault/faultcode').to_s,\r\n faultstring: xml.get_text('s:Body/s:Fault/faultstring').to_s,\r\n code: xml.get_text('s:Body/s:Fault/detail/UPnPError/errorCode').to_s.to_i,\r\n descr: xml.get_text('s:Body/s:Fault/detail/UPnPError/errorDescription').to_s\r\n }\r\n end\r\n return {is_error: false, has_xml: false}\r\n end",
"def set_soap\n @soap = Soap.find(params[:id])\n end",
"def log_request\n log \"SOAP request: #{@soap.endpoint}\"\n log soap_headers.merge(headers).map { |key, value| \"#{key}: #{value}\" }.join(\", \")\n log @soap.to_xml\n end",
"def assert_service_requested(name, method: nil, with: nil)\n service = SERVICES.fetch(name)\n request_method = method || service[:default_method] || :get\n\n stub = a_request(request_method, service[:url])\n stub = stub.with(with) if with.present?\n expect(stub).to have_been_made\n end",
"def _invalid_action\n render_soap_error(\"Cannot find SOAP action mapping for #{request.env['wash_out.soap_action']}\")\n end",
"def set_soap_headers\n self.headers['Content-Type'] = \"text/xml;charset=utf-8\"\n self.headers['SOAPAction'] = \"\\\"\\\"\"\n return \"headers set to soap\"\n end",
"def magic_action(action, user_id=self.user, patient_id = \"\", param_1=nil, param_2=nil, param_3=nil, param_4=nil, param_5=nil, param_6=nil, data=nil)\n begin\n response = self.client.request(\"Magic\", xmlns: \"http://www.allscripts.com/Unity\") do\n http.headers = {\"Accept-Encoding\" => \"gzip, deflate\", \"SOAPAction\" => \"http://www.allscripts.com/Unity/IUnityService/Magic\", \"Content-Type\" => \"text/xml; charset=UTF-8\"}\n soap.body = {\n \"Action\" => action,\n \"UserID\" => user_id,\n \"Appname\" => self.app,\n \"PatientID\" => patient_id,\n \"Token\" => self.security_token,\n \"Parameter1\" => param_1,\n \"Parameter2\" => param_2,\n \"Parameter3\" => param_3,\n \"Parameter4\" => param_4,\n \"Parameter5\" => param_5,\n \"Parameter6\" => param_6,\n \"data\" => data,\n :attributes! => {\"data\" =>{\"xsi:nil\" => true}}\n }\n end\n rescue Timeout::Error\n puts \"Timeout was rescued\"\n 30.times do |i|\n sleep 1\n puts \"#{30 - i} seconds to next retry\"\n end\n puts \"retrying\"\n retry\n end\n return response\n end",
"def operation_from(soap_action)\n return operations[soap_action] if enabled?\n { :action => soap_action.to_soap_key, :input => soap_action.to_soap_key }\n end",
"def send_action action_name, service_name\n self.class.actions[action_name][service_name].run(self)\n end",
"def send_soap_request(soapmsg)\n @log.debug \"Sending SOAP Request:\\n----------------\\n#{soapmsg}\\n----------------\"\n respmsg = @spcon.post(@ws_endpoint, soapmsg)\n @log.debug \"Received SOAP Response:\\n----------------\\n#{Nokogiri::XML(respmsg).to_xml}\\n----------------\"\n respmsg\n end",
"def do_service_action(action)\n validate :service, String\n\n service = request[:service]\n\n begin\n Log.instance.debug(\"Doing #{action} for service #{service}\")\n\n svc = get_puppet(service)\n\n unless action == \"status\"\n svc.send action\n sleep 0.5\n end\n\n reply[\"status\"] = svc.status.to_s\n rescue Exception => e\n reply.fail \"#{e}\"\n end\n end",
"def do_service_action(action)\n validate :service, String\n\n service = request[:service]\n\n begin\n Log.instance.debug(\"Doing #{action} for service #{service}\")\n\n svc = get_puppet(service)\n\n unless action == \"status\"\n svc.send action\n sleep 0.5\n end\n\n reply[\"status\"] = svc.status.to_s\n rescue Exception => e\n reply.fail \"#{e}\"\n end\n end",
"def new_stub_for method_name\n response = Response.new(Http::Request.new, Http::Response.new)\n response.request_type = method_name\n response.request_options = {}\n send(\"simulate_#{method_name}_response\", response)\n response.signal_success\n response\n end",
"def new_stub_for method_name\n response = Response.new(Http::Request.new, Http::Response.new)\n response.request_type = method_name\n response.request_options = {}\n send(\"simulate_#{method_name}_response\", response)\n response.signal_success\n response\n end",
"def soap_call(name, method, params = {})\n begin\n result = @service.request(name) do |soap|\n # soap.action = \"KashFlow/#{method}\"\n\n params = params.pop if params.is_a?(Array)\n params_xml = Gyoku.xml(params, { key_converter: :camelcase }) if params.present?\n\n params_xml = params_xml.gsub(/Id>/,\"ID>\") if params_xml\n params_xml = params_xml.gsub(/Dbid>/,\"DBID>\") if params_xml\n params_xml = params_xml.gsub(/<InvoiceLine>/, \"<InvoiceLine xsi:type=\\\"InvoiceLine\\\">\") if params_xml\n pretext, posttext = object_wrapper(name, params_xml)\n\n soap.xml = %[<?xml version=\"1.0\" encoding=\"utf-8\"?>\n <soap:Envelope xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">\n <soap:Body>\n <#{method} xmlns=\"KashFlow\">\n <UserName>#{@login}</UserName>\n <Password>#{@password}</Password>\n #{pretext}\n #{params_xml}\n #{posttext}\n </#{method}>\n </soap:Body>\n </soap:Envelope>]\n end.to_hash\n rescue Savon::SOAP::Fault => e\n puts \"soap fault:\" + e.inspect\n return false\n end\n end",
"def process_soap_parameters\n envelope = (request.env['wash_out.soap_data'] || {}).values_at(:envelope, :Envelope).compact.first || {}\n xml_data = (envelope.values_at(:body, :Body).compact.first || {}).with_indifferent_access\n if xml_data.key?(action_name)\n old_parameters = (request.headers[\"action_dispatch.request.parameters\"] || {}).with_indifferent_access\n data = old_parameters.merge(xml_data[action_name])\n request.headers['action_dispatch.request.parameters'] = data.with_indifferent_access\n end\n end",
"def setup_fixtured_request( action, *args )\n\t\turi = '/' + File.join( @appletname, action.to_s )\n\t\treq = Apache::Request.new( uri )\n\n\t\tparams = args.last.is_a?( Hash ) ? args.pop : {}\n\t\tdebug_msg \"Parameters hash set to: %p\" % [params]\n\t\treq.paramtable = params\n\n\t\tdebug_msg \"Request is: %p\" % [req]\n\t\t#txn = Arrow::Transaction.new( req, @config, nil )\n\t\ttxn = flexmock( \"transaction\" )\n\t\ttxn.should_receive( :request ).\n\t\t and_return( req ).zero_or_more_times\n\t\ttxn.should_receive( :vargs= ).\n\t\t with( Arrow::FormValidator ).zero_or_more_times\n\t\t\n\t\tvargs = flexmock( \"form validator\" )\n\t\ttxn.should_receive( :vargs ).\n\t\t\tand_return( vargs ).\n\t\t\tzero_or_more_times\n\t\tvargs.should_receive( :[] ).zero_or_more_times\n\t\t\n\t\tdebug_msg \"Transaction is: %p\" % [txn]\n\t\treturn txn, req, vargs, *args\n\tend",
"def stub_request(*args)\n WebMock::API.stub_request(*args)\n end",
"def soapserver\n server = SOAP::RPC::StandaloneServer.new('SwAServer', '', '0.0.0.0', 7000)\n server.add_servant(Doubler.new)\n server.start\nend",
"def request(action = :echo)\n @request = Rack::MockRequest.new(RocketPants::CacheMiddleware.new(controller_class.action(action)))\n end",
"def stub_request(*args)\n WebMock::API.stub_request(*args)\n end",
"def stub_request(*args)\n WebMock::API.stub_request(*args)\n end",
"def stub_request(*args)\n WebMock::API.stub_request(*args)\n end",
"def stub_request(*args)\n WebMock::API.stub_request(*args)\n end",
"def stub(service_name, &block)\n stubs[service_name] = block\n end",
"def public_methods # :nodoc:\n @client.wsdl.soap_actions\n end",
"def bootstrap_class(class_name)\n if valid_soap_api? class_name\n klass = const_set(class_name.to_sym, Class.new do\n extend Vindicia::Bootstrap::ClassMethods\n end)\n\n klass.client.wsdl.document = determine_wsdl(class_name)\n\n klass.client.wsdl.soap_actions.each do |method|\n bootstrapped_method = bootstrap_method(method)\n klass.module_eval &bootstrapped_method\n end\n end\n end",
"def stubs; end",
"def soap_actions\n @soap_actions ||= stream.operations.keys\n end",
"def method_missing(api_method, *args) # :nodoc:\n @client.wsdl.soap_actions.include?(api_method.to_sym) ? call(api_method, *args) : super\n end",
"def stub_twilio_request\n allow_any_instance_of(TwilioService).to receive(:send_message)\nend",
"def do_action(action, args = {})\n message = <<EOS\n<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:web=\"http://localhost/WebService/\">\n <soapenv:Header/>\n <soapenv:Body>\n <web:GetReport>\n <!--Optional:-->\n <web:Input><![CDATA[<InputData>\n <Session>\n <GroupName>#{group_name}</GroupName>\n <Subscriber_ID>#{subscriber_id}</Subscriber_ID>\n <Group_ID>#{group_id}</Group_ID>\n <User_ID>#{user_id}</User_ID>\n <RequestType>#{request_type}</RequestType>\n <Environment>#{environment}</Environment>\n </Session>\n <Consumer num=\"#{args[:consumer_num]}\">\n <Application>\n <First_Name_1>#{args[:firstname]}</First_Name_1>\n <First_Name_2></First_Name_2>\n <Surname>#{args[:surname]}</Surname>\n <National_ID>#{args[:nationalid]}</National_ID>\n <RSAid>#{args[:rsaid]}</RSAid> <Gender />\n <Birthday>#{args[:birthday]}19221122</Birthday>\n <Residence_Building />\n <Residence_Street />\n <Residence_Suburb />\n <Residence_City />\n <Residence_Postal_Code />\n </Application>\n </Consumer>\n <Enquirer>\n <SubscriberName>SUMMIT FINANCE</SubscriberName>\n <SubscriberTel>(011)000-0000</SubscriberTel>\n </Enquirer></InputData>]]>\n </web:Input>\n </web:GetReport>\n </soapenv:Body>\n</soapenv:Envelope>\nEOS\n\n response = @wsdl.request action do\n soap.xml = message\n end\n result = Nori.parse(response.to_hash[:get_report_response][:output])\n if result == \"Invalid Input Data\"\n raise Experin::InvalidXmlError, \"Input xml string is not valid\"\n else\n p result\n end\n end",
"def initialize(soap_mapping_object)\n @soap_response = soap_mapping_object.send(\n soap_mapping_object.singleton_methods.delete_if { |m| \n m =~ /=$/\n }.first\n )\n end",
"def soapConnect\n printDebugMessage('soapConnect', 'Begin', 11)\n if !@soap\n # Create the service proxy\n @soap = SOAP::WSDLDriverFactory.new(@wsdl).create_rpc_driver\n # Enable compression support if available (requires http-access2).\n begin\n require 'http-access2'\n @soap.streamhandler.accept_encoding_gzip = true\n printDebugMessage('soapConnect', 'Compression support enabled', 1)\n rescue LoadError\n printDebugMessage('soapConnect', 'Compression support not available', 1)\n end\n # Set connection timeouts.\n @soap.options[\"protocol.http.connect_timeout\"] = @timeout\n @soap.options[\"protocol.http.receive_timeout\"] = @timeout\n # Enable trace output.\n @soap.wiredump_dev = STDOUT if @trace\n # Try to set a user-agent.\n begin\n soapUserAgent()\n rescue Exception => ex\n if @debugLevel > 0\n $stderr.puts ex\n $stderr.puts ex.backtrace\n end\n printDebugMessage('soapConnect', 'Unable to set User-agent', 11)\n end\n end\n printDebugMessage('soapConnect', 'End', 11)\n end",
"def stub_service(name, status: 200, method: :get, headers: {}, query: nil, response_body: nil, response_fixture: nil)\n service = SERVICES_MAP[name]\n raise \"No service #{name}\" unless service.present?\n\n response_body = load_fixture(response_fixture || name) if response_body.nil?\n headers = service[:default_headers] if headers.blank?\n\n stub = stub_request(method, service[:url])\n stub.with(query: query) if query.present?\n stub.to_return(status: status, body: response_body, headers: headers)\n end",
"def i_invoke(action, args = {})\n result = case action\n when String\n browser.invoke_service(action, args)\n when ::Waw::ActionController::Action\n browser.invoke_action(action, args)\n else\n raise ArgumentError, \"Unable to apply i_invoke on #{action.inspect}, unable to catch the associated action\"\n end\n assert Net::HTTPSuccess===result, __last_because + \" (invoking #{action.inspect} led to: #{result})\"\n result\n end",
"def _call_action(action)\n send(action)\n end",
"def stub_service(name, status: 200, method: nil, headers: {}, with: nil, response_body: nil, response_fixture: nil)\n service = SERVICES.fetch(name)\n raise \"No service #{name}\" unless service.present?\n\n response_body = load_fixture(response_fixture || name) if response_body.nil?\n headers = service[:default_headers] if headers.blank?\n request_method = method || service[:default_method] || :get\n\n stub = stub_request(request_method, service[:url])\n stub.with(with) if with.present?\n stub.to_return(status: status, body: response_body, headers: headers)\n end",
"def make_request(action, params = {})\n path = params[:path] || \"/\"\n method = params[:method] || \"post\"\n env = Rack::MockRequest.env_for(path, params: params.except(:path).except(:method), method: method)\n status, headers, body = described_class.action(action).call(env)\n @response = ActionDispatch::TestResponse.new(status, headers, body)\n @controller = body.instance_variable_get(:@response).request.env['action_controller.instance']\nend",
"def stubs=(_arg0); end",
"def service_request(service); end",
"def with(soap_body)\n Savon::SOAP::XML.any_instance.expects(:body=).with(soap_body) if mock_method == :expects\n self\n end",
"def invoke_action(name)\n # debugger\n self.send(name)\n render(name.to_s) unless already_built_response?\n end",
"def send_raw(xml)\n open\n @soap_client.ProcessRequest(@ticket, xml)\n close \n end",
"def proxy_action(action)\n Chef::Log.debug(\"[#{new_resource} Running proxied #{action} action\")\n new_resource.subresources.each do |r|\n begin\n r.run_action(action) if r.allowed_actions.include?(action)\n rescue Chef::Exceptions::UnsupportedAction\n # Don't care, just move on.\n end\n end\n end",
"def load_lbtt_convey\n message = { 'ins0:TareRefno': '251', Version: '1', Username: 'VALID.USER', ParRefno: '117' }\n fixture = File.read(\"#{FIXTURES_MOCK_ROOT}lbtt/lbtt_load_convey.xml\")\n @savon.expects(:lbtt_tax_return_wsdl).with(message: message).returns(fixture)\nend",
"def stubs(method_name_or_hash, backtrace = nil)\n iterator = ArgumentIterator.new(method_name_or_hash)\n iterator.each { |*args|\n method_name = args.shift\n ensure_method_not_already_defined(method_name)\n expectation = Expectation.new(self, method_name, backtrace)\n expectation.at_least(0)\n expectation.returns(args.shift) if args.length > 0\n @expectations.add(expectation)\n }\n end",
"def stub_for method_name\n @stubs ||= {}\n @stubs[method_name] ||= new_stub_for(method_name)\n end",
"def call\n @called = true\n handleResponse @stubs\n end",
"def soap_operation_to_method(soap_body)\n method = soap_body.root.name.sub(/Request$/, '').underscore.to_sym\n end",
"def return_ruby_from_soap(action_name, soap_response, out_argument)\n out_arg_name = out_argument[:name]\n #puts \"out arg name: #{out_arg_name}\"\n\n related_state_variable = out_argument[:relatedStateVariable]\n #puts \"related state var: #{related_state_variable}\"\n\n state_variable = @service_state_table.find do |state_var_hash|\n state_var_hash[:name] == related_state_variable\n end\n\n #puts \"state var: #{state_variable}\"\n\n int_types = %w[ui1 ui2 ui4 i1 i2 i4 int]\n float_types = %w[r4 r8 number fixed.14.4 float]\n string_types = %w[char string uuid]\n true_types = %w[1 true yes]\n false_types = %w[0 false no]\n\n if soap_response.success? && soap_response.to_xml.empty?\n log \"<#{self.class}> Got successful but empty soap response!\"\n return {}\n end\n\n if int_types.include? state_variable[:dataType]\n {\n out_arg_name.to_sym => soap_response.\n hash[:Envelope][:Body][\"#{action_name}Response\".to_sym][out_arg_name.to_sym].to_i\n }\n elsif string_types.include? state_variable[:dataType]\n {\n out_arg_name.to_sym => soap_response.\n hash[:Envelope][:Body][\"#{action_name}Response\".to_sym][out_arg_name.to_sym].to_s\n }\n elsif float_types.include? state_variable[:dataType]\n {\n out_arg_name.to_sym => soap_response.\n hash[:Envelope][:Body][\"#{action_name}Response\".to_sym][out_arg_name.to_sym].to_f\n }\n elsif true_types.include? state_variable[:dataType]\n {\n out_arg_name.to_sym => true\n }\n elsif false_types.include? state_variable[:dataType]\n {\n out_arg_name.to_sym => false\n }\n else\n log \"<#{self.class}> Got SOAP response that I dunno what to do with: #{soap_response.hash}\"\n end\n end",
"def test_acknowledgement\n Net::HTTP.any_instance.expects(:request).returns(stub(:body => invoice_data.to_json))\n assert @go_coin.acknowledge\n end",
"def stub_call(method:, message: :any, response:)\n savon.expects(method).with(message: message).returns(response)\n end",
"def initialize(options = {})\n @endpoint, @namespace = options[:endpoint], options[:namespace]\n @soap_action_prefix = \"#{@endpoint}/I#{@namespace.match(/\\/([A-Za-z]+)\\.svc/)[1]}\"\n end",
"def stub_for method_name\n @stubs[method_name] ||= new_stub_for(method_name)\n end",
"def stub(url, stubs)\n @stubs[url_as_regex(url)] = stubs\n end",
"def webmock_1_invoice\n stub_request(:get, \"https://tomas%40vaisar.cz:heslo@tomvaisar.billapp.cz/invoices.xml\").\n with(:headers => {'Accept'=>'application/xml', 'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'User-Agent'=>'Ruby'}).\n to_return(:status => 200, :body => '<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n <invoices type=\"array\">\n <invoice>\n <account-id type=\"integer\">7104</account-id>\n <client-id type=\"integer\">58547</client-id>\n <created-at type=\"datetime\">2015-04-09T11:08:04+02:00</created-at>\n <currency>CZK</currency>\n <due-date type=\"date\">2015-04-19</due-date>\n <has-vat type=\"boolean\">false</has-vat>\n <id type=\"integer\">99941</id>\n <issue-date type=\"date\">2015-04-09</issue-date>\n <notes></notes>\n <number>201500001</number>\n <paid-on type=\"date\" nil=\"true\"></paid-on>\n <status>sent</status>\n <tax-amount type=\"decimal\">0.0</tax-amount>\n <total-amount type=\"decimal\">30000.0</total-amount>\n <updated-at type=\"datetime\">2015-04-09T11:08:05+02:00</updated-at>\n <lines type=\"array\">\n <line type=\"InvoiceLine\">\n <description>Kupa sena</description>\n <item-id type=\"integer\" nil=\"true\"></item-id>\n <quantity type=\"decimal\">1.0</quantity>\n <unit-price type=\"decimal\">30000.0</unit-price>\n <vat type=\"decimal\">0.0</vat>\n </line>\n <line type=\"InvoiceLine\">\n <description>Kupa hnoje</description>\n <item-id type=\"integer\" nil=\"true\"></item-id>\n <quantity type=\"decimal\">1.0</quantity>\n <unit-price type=\"decimal\">5000.0</unit-price>\n <vat type=\"decimal\">0.0</vat>\n </line>\n </lines>\n </invoice>\n </invoices>', :headers => {})\n end",
"def raises_soap_fault\n Savon::SOAP::Response.any_instance.expects(:soap_fault?).returns(true)\n self\n end",
"def execute_action(action_name, args, &block)\n validator = ParametersValidator.new(get_service_registry())\n args = validator.validate_args(action_name, args)\n response = execute_soap_request(\n action_name.to_sym, args, validator.extra_namespaces)\n log_headers(response.http.headers)\n handle_errors(response)\n return extract_result(response, action_name, &block)\n end",
"def mock_request(script = \"test\" , headers = {})\n unless @mock_request\n @mock_request = mock(\"MockAgiRequest[script=#{script}]\")\n @mock_request.stub!(\n :extension => \"test\",\n :priority => \"1\",\n :context => \"default\",\n :uniqueId => \"123456789.7\", \n :language => \"en\",\n :channel => \"SIP/127.0.0.1-00000003\",\n :type => \"SIP\",\n :script => script,\n :requestURL => \"agi://fake_test_host:1234/#{script}\") #[?param1=value1¶m2=value2]. \"\n end\n @mock_request\nend",
"def on_send(node)\n _receiver, method_name, _args = *node\n return unless method_name == :stub\n\n add_offense(node)\n end",
"def _parse_soap_parameters\n soap_action = request.env['wash_out.soap_action']\n action_spec = self.class.soap_actions[soap_action]\n\n # Do not interfere with project-space Nori setup\n strip = Nori.strip_namespaces?\n convert = Nori.convert_tags?\n Nori.strip_namespaces = true\n Nori.convert_tags_to { |tag| tag.snakecase.to_sym }\n\t\n body = request.body.read\n params = Nori.parse(body)\n\n request_doc = REXML::Document.new(body)\n sign_els = REXML::XPath.first(request_doc, \"//ds:Signature\", {\"ds\"=>\"http://www.w3.org/2000/09/xmldsig#\"})\n\n unless sign_els.blank?\n render_soap_error('The signature is invalid.') unless XMLSec.verify_sign(body)\n end\n\t\n encrypted_elements = REXML::XPath.match(request_doc, \"//xenc:EncryptedData\", 'xenc' => 'http://www.w3.org/2001/04/xmlenc#')\n\n unless encrypted_elements.blank?\n begin\n\t decrypted_request = XMLSec.decrypt(body, WS_SECURITY_SETTINGS[\"private_key\"], WS_SECURITY_SETTINGS[\"cert\"])\n rescue => e\n render_soap_error(e.message)\n end\n\tdecrypted_doc = REXML::Document.new decrypted_request\n\tsign_els = REXML::XPath.first(decrypted_doc, \"//ds:Signature\", {\"ds\"=>\"http://www.w3.org/2000/09/xmldsig#\"})\n\n unless sign_els.blank?\n render_soap_error('The signature is invalid.') unless XMLSec.verify_sign(decrypted_request)\n end\n\n params = Nori.parse(decrypted_request)\n end\t\n\n xml_data = params[:envelope][:body][soap_action.underscore.to_sym] || {}\n\n strip_empty_nodes = lambda{|hash|\n hash.each do |key, value|\n if value.is_a? Hash\n value = value.delete_if{|key, value| key.to_s[0] == '@'}\n\n if value.length > 0\n hash[key] = strip_empty_nodes.call(value)\n else\n hash[key] = nil\n end\n end\n end\n\n hash\n }\n\n xml_data = strip_empty_nodes.call(xml_data)\n\n # Reset Nori setup to project-space\n Nori.strip_namespaces = strip\n Nori.convert_tags_to convert\n\n @_params = HashWithIndifferentAccess.new\n\n action_spec[:in].each do |param|\n @_params[param.name] = param.load(xml_data, param.name.to_sym)\n end\n end",
"def dispatch(ews, soapmsg, opts)\n respmsg = post(soapmsg)\n @log.debug <<-EOF.gsub(/^ {6}/, '')\n Received SOAP Response:\n ----------------\n #{Nokogiri::XML(respmsg).to_xml}\n ----------------\n EOF\n opts[:raw_response] ? respmsg : ews.parse_soap_response(respmsg, opts)\n end",
"def receive_action(action)\n self.define_singleton_method(:perform_action, &action)\n end",
"def method_missing(method, *args, &block) #:doc:\n super unless @wsdl.respond_to? method\n\n setup method, &block\n dispatch method\n end",
"def set_soap_namespace\n \n # @client.wsdl.namespaces[\"xmlns:ns2\"] = \"http://www.ups.com/XMLSchema/XOLTWS/Pickup/v1.1\"\n # @client.wsdl.namespaces[\"xmlns:ns3\"] = \"http://www.ups.com/XMLSchema/XOLTWS/UPSS/v1.0\"\n end",
"def request(options = {}, &body)\n soap_xml = request_builder.render(\n body: options.delete(:body) || body,\n header: options.delete(:header),\n )\n options[:body] = soap_xml\n soap_request options\n end",
"def invoke_action(name)\n end",
"def method_missing(method, *args, &block)\n if actions(current_api).include? method\n self.class.send(:define_method, method) do |params|\n perform_soap_call(method, params) || perform_rest_call(params)\n end\n send method, *args, &block\n else\n super\n end\n end",
"def invoke_action(name)\n end",
"def invoke_action(name)\n end",
"def trigger(resource_type_identifier, action_instance)\n raise Occi::Api::Client::Errors::NotImplementedError, \"#{__method__} is just a stub!\"\n end",
"def send_action(action, params={})\n set_params Hash[action: action].merge params\n @response = http_send_action\n end",
"def test_service\n @options[:method] = 'testservice'\n response = ta_response(base_params)\n return response\n end",
"def invoke_action(name)\n self.send(name)\n render unless self.already_built_response?\n\n end",
"def octokit_stubs(&block)\n block ||= proc { |stubs| stubs }\n @octokit_stubs ||= Faraday::Adapter::Test::Stubs.new(&block)\n end",
"def invoke_action(name)\n send(name)\n render(name) unless already_built_response?\n end",
"def _render_soap(result, options)\n @namespace = NAMESPACE\n @operation = soap_action = request.env['wash_out.soap_action']\n action_spec = self.class.soap_actions[soap_action][:out].clone\n result = { 'value' => result } unless result.is_a? Hash\n result = HashWithIndifferentAccess.new(result)\n inject = lambda {|data, spec|\n spec.each do |param|\n if param.struct?\n inject.call(data[param.name], param.map)\n else\n param.value = data[param.name]\n end\n end\n }\n\n soap_response = render_to_string :template => 'wash_with_soap/response',\n :locals => { :result => inject.call(result, action_spec) }\n\n if options[:ws_security] == \"encrypt\" || options[:ws_security] == \"sign\" || options[:ws_security] == \"sign_encrypt\"\n soap_response = ws_security_apply(soap_response, options)\n end\n \n\n\n if is_exception?(soap_response)\n Rails.logger.error \"PHP_SCRIPT_ERROR #{ws_security_response}\"\n render :template => 'wash_with_soap/error', :status => 500,\n :locals => { :error_message => \"php_script_error\" }\n else\n render :xml => soap_response\n end\n end",
"def generate_headers(request, soap)\n super(request, soap)\n credentials = @credential_handler.credentials\n request.url = soap.endpoint\n request.headers['Authorization'] =\n @auth_handler.auth_string(credentials)\n end",
"def soap_params\n params.require(:soap).permit(:soap_name, :soap_age, :director_id)\n end",
"def stub_server_interface\n @handler.stubs(:accept_header ).returns \"format_one,format_two\"\n @handler.stubs(:content_type_header).returns \"text/yaml\"\n @handler.stubs(:set_content_type ).returns \"my_result\"\n @handler.stubs(:set_response ).returns \"my_result\"\n @handler.stubs(:path ).returns \"/my_handler/my_result\"\n @handler.stubs(:http_method ).returns(\"GET\")\n @handler.stubs(:params ).returns({})\n @handler.stubs(:content_type ).returns(\"text/plain\")\n end",
"def stub_cas_first_login(pending_sage_user, pending_sage_user_email, source='billingboss')\n unstub_cas\n stub_cas_check_status\n RAILS_DEFAULT_LOGGER.debug(\"stubbing for first_login. pending_sage_user: #{pending_sage_user} pending_sage_user_email: #{pending_sage_user_email.inspect}\") \n CASClient::Frameworks::Rails::Filter.send :define_method, :handle_authentication do\n RAILS_DEFAULT_LOGGER.debug \"in stub for first_login. pending_sage_user: #{pending_sage_user} pending_sage_user_email: #{pending_sage_user_email.inspect}\" \n controller.session[:sage_user] = pending_sage_user\n controller.session[:cas_extra_attributes] = {:username => pending_sage_user, :email => pending_sage_user_email, :source => source}.with_indifferent_access \n return (returns_url? ? service_url : true)\n end\n end",
"def generate_headers(request, soap)\n credentials = @credential_handler.credentials\n headers = @auth_handler.headers(credentials)\n request_header = headers.inject({}) do |request_header, (header, value)|\n if header == :access_token\n request.url = soap.endpoint\n request.headers['Authorization'] =\n @auth_handler.generate_oauth_parameters_string(credentials,\n request)\n else\n request_header[prepend_namespace(header)] = value\n end\n request_header\n end\n soap.header[prepend_namespace(@element_name)] = request_header\n end",
"def _dispatch(action)\n _run_filters(:before, action)\n response = send(action)\n _run_filters(:after, action)\n response\n end"
] | [
"0.7005628",
"0.6440213",
"0.63615036",
"0.6125479",
"0.6076433",
"0.60549504",
"0.58985615",
"0.5896121",
"0.58701813",
"0.5791605",
"0.5756803",
"0.5471734",
"0.5461388",
"0.5436634",
"0.5424589",
"0.54219496",
"0.54078627",
"0.53144324",
"0.53101104",
"0.5275829",
"0.5274736",
"0.5251836",
"0.5250061",
"0.52449346",
"0.5241232",
"0.5234513",
"0.51934534",
"0.51866305",
"0.5173251",
"0.5173251",
"0.51477265",
"0.51414883",
"0.5102004",
"0.50777715",
"0.5077056",
"0.505723",
"0.50172555",
"0.50172555",
"0.50172555",
"0.50172555",
"0.5012286",
"0.4951979",
"0.49293557",
"0.49040565",
"0.4897457",
"0.4881238",
"0.48695266",
"0.48120835",
"0.48066804",
"0.4788998",
"0.47740865",
"0.4743872",
"0.4732346",
"0.47313726",
"0.47310814",
"0.4730265",
"0.47299302",
"0.472841",
"0.47282624",
"0.47231668",
"0.47162724",
"0.47050607",
"0.46976656",
"0.4692047",
"0.46824703",
"0.46687487",
"0.46674046",
"0.46617094",
"0.46468812",
"0.46180946",
"0.46159732",
"0.461021",
"0.46088687",
"0.4605475",
"0.45974943",
"0.4595817",
"0.45896938",
"0.45892024",
"0.45592076",
"0.4558863",
"0.45544645",
"0.45446545",
"0.45341793",
"0.45136723",
"0.45127693",
"0.45119223",
"0.45119223",
"0.4510306",
"0.44873205",
"0.44861755",
"0.44796562",
"0.44636172",
"0.4463298",
"0.44632807",
"0.44603005",
"0.44463184",
"0.44444942",
"0.44396576",
"0.44313055",
"0.44286516"
] | 0.80295694 | 0 |
Expects a given SOAP body Hash to be used. | def with(soap_body)
Savon::SOAP::XML.any_instance.expects(:body=).with(soap_body) if mock_method == :expects
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_soap_body\n envelope = self[self.keys.first] || {}\n body_key = envelope.keys.find { |key| /.+:Body/ =~ key } rescue nil\n body_key ? envelope[body_key].map_soap_response : {}\n end",
"def soap_body(env)\n env['rack.input'].respond_to?(:string) ? env['rack.input'].string\n : env['rack.input'].read\n end",
"def from_hash(hash)\n super(hash)\n verify\n end",
"def initialize(hash)\n @header = Msg::Header.new(hash)\n @body = Msg::Body.new(content_is_json?, hash)\n end",
"def get_message_from_thrift_binary(body)\n binary_string = Base64.decode64(body)\n rmessage = nil\n @@deserializer_lock.synchronize do\n rmessage = @@deserializer.deserialize(Messagebus::Thrift::MessageInternal.new, binary_string)\n end\n Messagebus::Message.create_message_from_message_internal(rmessage)\n end",
"def assert_hash!(hash)\n unless hash.kind_of?(Hash)\n raise \"Passed parameter must be of type Hash, got: #{hash}\"\n end\n end",
"def process_request(legacy_xml_or_hash)\n c.soap(:ProcessRequest, { \n :ProcessRequest => { \n :_attributes => { :xmlns => 'http://wildwestdomains.com/webservices/' },\n :sRequestXML => \n c.class.escape_html(\n \"<wapi clTRID='#{GoDaddyReseller::API.next_uid[0..50]}'\" + \n \" account='#{user_id}' pwd='#{password}'>\" +\n \"#{legacy_xml_or_hash.is_a?(Hash) ? c.class.xml_encode_hash(legacy_xml_or_hash) : legacy_xml_or_hash.to_s}\" +\n \"</wapi>\"\n )\n }\n }\n )\n end",
"def transaction_for_hash(hash)\n raise \"Not Implemented\"\n end",
"def verify_signature(payload_body)\n signature = 'sha1=' + OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha1'), SECRET_TOKEN, payload_body)\n return halt 500, \"Signatures didn't match!\" unless Rack::Utils.secure_compare(signature, request.env['HTTP_X_HUB_SIGNATURE'])\n end",
"def verify_signature(payload_body)\n signature = 'sha1=' + OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha1'), SECRET_TOKEN, payload_body)\n return halt 500, \"Signatures didn't match!\" unless Rack::Utils.secure_compare(signature, request.env['HTTP_X_HUB_SIGNATURE'])\n end",
"def merge_params_from_body(_params = params)\n _params = _params.dup\n _params_from_body = parse_body\n _params = _params_from_body.merge(_params) if _params_from_body.is_a?(Hash)\n indifferent_hash.merge(_params)\n end",
"def makehash(body)\n Log.debug(\"Creating message hash using #{private_key_file}\")\n\n sign(private_key_file, body.to_s)\n end",
"def check_signature(body)\n received_signature = request.env['HTTP_X_HUB_SIGNATURE'] || ''\n signature = 'sha1=' + hmac_sha1(settings.github_secret, body)\n\n if !Rack::Utils.secure_compare(signature, received_signature)\n build_failed('signature mismatch')\n end\nend",
"def test_body_with_binary(body, opts = {})\n test_body_with_binary_with_http_info(body, opts)\n nil\n end",
"def verify_signature(payload_body)\n signature = 'sha1=' + OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha1'), ENV['WEBHOOK_SECRET_TOKEN'], payload_body)\n return halt 500, \"Signatures didn't match!\" unless Rack::Utils.secure_compare(signature, request.env['HTTP_X_HUB_SIGNATURE'])\nend",
"def unpack_body(body)\n # fixed fields\n self.action = body.slice!(0)\n self.priv_lvl = body.slice!(0)\n self.authen_type = body.slice!(0)\n self.service = body.slice!(0)\n self.user_len = body.slice!(0)\n self.port_len = body.slice!(0)\n self.rem_addr_len = body.slice!(0)\n self.data_len = body.slice!(0)\n \n # variable fields\n self.user = body.slice!(0..(@user_len - 1)) if (@user_len != 0)\n self.port = body.slice!(0..(@port_len - 1)) if (@port_len != 0)\n self.rem_addr = body.slice!(0..(@rem_addr_len - 1)) if (@rem_addr_len != 0)\n self.data = body.slice!(0..(@data_len - 1)) if (@data_len != 0) \n return(nil)\n end",
"def initialize(mailbox, hash, headers, raw)\n @hash = hash\n @mailbox = mailbox\n @headers = headers\n @raw = raw\n end",
"def with(body = nil, &block)\n Savon.config.hooks.define(:spec_body, :soap_request) do |_, request|\n if block\n block.call(request)\n else\n actual = request.soap.body\n raise ExpectationError, \"expected #{body.inspect} to be sent, got: #{actual.inspect}\" unless actual == body\n end\n\n respond_with\n end\n\n self\n end",
"def unpack_body(body)\n # fixed fields\n self.flags = body.slice!(0)\n self.authen_method = body.slice!(0)\n self.priv_lvl = body.slice!(0)\n self.authen_type = body.slice!(0)\n self.authen_service = body.slice!(0)\n self.user_len = body.slice!(0)\n self.port_len = body.slice!(0)\n self.rem_addr_len = body.slice!(0)\n self.arg_cnt = body.slice!(0)\n \n # variable fields\n @arg_lens = (body.slice!(0..(@arg_cnt - 1))).unpack('C*') if (@arg_cnt != 0)\n @user = body.slice!(0..(@user_len - 1)) if (@user_len != 0)\n @port = body.slice!(0..(@port_len - 1)) if (@port_len != 0)\n @rem_addr = body.slice!(0..(@rem_addr_len - 1)) if (@rem_addr_len != 0)\n \n if (self.arg_cnt != 0)\n @args = []\n @arg_lens.each {|x| @args.push( body.slice!( 0..(x - 1) ) )}\n end\n \n return(nil)\n end",
"def block_header_for_hash(hash)\n guard_block_index_access { @block_index.block_header_for_hash(hash) } || valid_block_for_hash(hash)\n end",
"def validate(hash)\n @is_sandbox = hash[:sandbox] || hash['sandbox'] || false\n raise ArgumentError, \"Sandbox must be type of boolean\" unless @is_sandbox == false || @is_sandbox == true\n @auth_token = hash[:auth_token] || hash['auth_token'] || \"\"\n raise ArgumentError, \"Must specify from auth token\" if @auth_token.empty?\n @notebook_name = to_utf8(hash[:notebook] || hash['notebook'] || \"\")\n raise ArgumentError, \"Must specify from notebook\" if @notebook_name.empty?\n @stack_name = to_utf8(hash[:stack] || hash['stack'])\n @tags = to_utf8(hash[:tags] || hash['tags'] || [])\n end",
"def unenvelope_body(body)\n body[envelope_key] || body['data']\n end",
"def unenvelope_body(body)\n body[envelope_key] || body['data']\n end",
"def unenvelope_body(body)\n body[envelope_key] || body['data']\n end",
"def build_soap_message(params)\n Hash[params.map { |k, v| [\"v2:#{k}\", v] }]\n end",
"def soap_call(name, method, params = {})\n begin\n result = @service.request(name) do |soap|\n # soap.action = \"KashFlow/#{method}\"\n\n params = params.pop if params.is_a?(Array)\n params_xml = Gyoku.xml(params, { key_converter: :camelcase }) if params.present?\n\n params_xml = params_xml.gsub(/Id>/,\"ID>\") if params_xml\n params_xml = params_xml.gsub(/Dbid>/,\"DBID>\") if params_xml\n params_xml = params_xml.gsub(/<InvoiceLine>/, \"<InvoiceLine xsi:type=\\\"InvoiceLine\\\">\") if params_xml\n pretext, posttext = object_wrapper(name, params_xml)\n\n soap.xml = %[<?xml version=\"1.0\" encoding=\"utf-8\"?>\n <soap:Envelope xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">\n <soap:Body>\n <#{method} xmlns=\"KashFlow\">\n <UserName>#{@login}</UserName>\n <Password>#{@password}</Password>\n #{pretext}\n #{params_xml}\n #{posttext}\n </#{method}>\n </soap:Body>\n </soap:Envelope>]\n end.to_hash\n rescue Savon::SOAP::Fault => e\n puts \"soap fault:\" + e.inspect\n return false\n end\n end",
"def _parse_soap_parameters\n soap_action = request.env['wash_out.soap_action']\n action_spec = self.class.soap_actions[soap_action]\n\n # Do not interfere with project-space Nori setup\n strip = Nori.strip_namespaces?\n convert = Nori.convert_tags?\n Nori.strip_namespaces = true\n Nori.convert_tags_to { |tag| tag.snakecase.to_sym }\n\t\n body = request.body.read\n params = Nori.parse(body)\n\n request_doc = REXML::Document.new(body)\n sign_els = REXML::XPath.first(request_doc, \"//ds:Signature\", {\"ds\"=>\"http://www.w3.org/2000/09/xmldsig#\"})\n\n unless sign_els.blank?\n render_soap_error('The signature is invalid.') unless XMLSec.verify_sign(body)\n end\n\t\n encrypted_elements = REXML::XPath.match(request_doc, \"//xenc:EncryptedData\", 'xenc' => 'http://www.w3.org/2001/04/xmlenc#')\n\n unless encrypted_elements.blank?\n begin\n\t decrypted_request = XMLSec.decrypt(body, WS_SECURITY_SETTINGS[\"private_key\"], WS_SECURITY_SETTINGS[\"cert\"])\n rescue => e\n render_soap_error(e.message)\n end\n\tdecrypted_doc = REXML::Document.new decrypted_request\n\tsign_els = REXML::XPath.first(decrypted_doc, \"//ds:Signature\", {\"ds\"=>\"http://www.w3.org/2000/09/xmldsig#\"})\n\n unless sign_els.blank?\n render_soap_error('The signature is invalid.') unless XMLSec.verify_sign(decrypted_request)\n end\n\n params = Nori.parse(decrypted_request)\n end\t\n\n xml_data = params[:envelope][:body][soap_action.underscore.to_sym] || {}\n\n strip_empty_nodes = lambda{|hash|\n hash.each do |key, value|\n if value.is_a? Hash\n value = value.delete_if{|key, value| key.to_s[0] == '@'}\n\n if value.length > 0\n hash[key] = strip_empty_nodes.call(value)\n else\n hash[key] = nil\n end\n end\n end\n\n hash\n }\n\n xml_data = strip_empty_nodes.call(xml_data)\n\n # Reset Nori setup to project-space\n Nori.strip_namespaces = strip\n Nori.convert_tags_to convert\n\n @_params = HashWithIndifferentAccess.new\n\n action_spec[:in].each do |param|\n @_params[param.name] = param.load(xml_data, param.name.to_sym)\n end\n end",
"def headers=(hash); end",
"def headers=(hash); end",
"def set_hash(hash, hash_type = 'SHA256')\n @hash = hash\n @hash_type = hash_type\n end",
"def processed_block_for_hash(hash)\n if Toshi::Models::Block.where(hsh: hash).empty?\n return nil\n end\n stored_block = Toshi::Models::RawBlock.where(hsh: hash).first\n if !stored_block\n return nil\n end\n Bitcoin::Protocol::Block.new(stored_block.payload)\n end",
"def sanitize_hash(hash)\n return {} if hash.nil? \n Hash.from_xml(hash.to_xml)['hash']\n end",
"def createShippedTeamEmailBody(order_hash)\t\n\t\tbody = <<EOM\t\nOrderNum: #{order_hash[:order_number]}<br/>\nShipMethod: #{order_hash[:ship_option]}<br/>\nTrackingNum: #{order_hash[:tracking_number]}<br/>\nShipToFirstName: #{order_hash[:ship_to_first_name]}<br/>\nShipToLastName: #{order_hash[:ship_to_last_name]}<br/>\nShipToCompany: #{order_hash[:ship_to_company]}<br/>\nShipToAddr1: #{order_hash[:ship_to_addr1]}<br/>\nShipToAddr2: #{order_hash[:ship_to_addr2]}<br/>\nShipToCity: #{order_hash[:ship_to_city]}<br/>\nShipToState: #{order_hash[:ship_to_state]}<br/>\nShipToZip: #{order_hash[:ship_to_zip]}<br/>\nEOM\n\n\t\tbody\n\tend",
"def params=(hash); end",
"def params=(hash); end",
"def get_hash(*params); raise('Stub or mock required.') end",
"def unbox(hash)\n @server_message = hash['ServerMessage']\n @server_code = hash['ServerCode']\n @model = Validate.from_hash(hash['model']) if hash['model']\n end",
"def parse_body(body)\n if body\n if body.is_a?(Hash)\n body\n else\n begin\n data = JSON.parse(body)\n data\n rescue JSON::ParserError\n {}\n end\n end\n else\n {}\n end\nend",
"def normalize_body(normalized_headers, body)\n case body\n when nil\n return body\n when ::Hash, ::Array\n body_hash = body\n when ::String\n # use unparsed original body to avoid losing information when we are\n # unable to parse or parse a literal JSON string as happens in the case\n # of RightAPI's health-check.\n return @body\n end\n case ct = compute_content_type(normalized_headers)\n when 'application/x-www-form-urlencoded'\n result = build_query_string(body_hash)\n normalize_content_length(normalized_headers, result)\n else\n result = ::JSON.dump(body_hash)\n normalize_content_length(normalized_headers, result)\n end\n result\n end",
"def convert_back_office_hash(body)\n @case_references = []\n @case_ref_nos = []\n ServiceClient.iterate_element(body[:application_cases]) do |cases|\n @case_references.push(cases[:case_reference])\n @case_ref_nos.push(cases[:case_refno])\n end\n end",
"def check!(hash)\n build!(hash)\n return hash\n end",
"def create_get_verification_state_message(body)\n\n # prepare query url\n _query_builder = Configuration.base_uri.dup\n _query_builder << '/rest/verification/message'\n _query_url = APIHelper.clean_url _query_builder\n\n # prepare headers\n _headers = {\n 'accept' => 'application/json',\n 'content-type' => 'application/json; charset=utf-8'\n }\n\n # prepare and execute HttpRequest\n _request = @http_client.post _query_url, headers: _headers, parameters: body.to_json\n BasicAuth.apply(_request)\n _context = execute_request(_request)\n\n # validate response against endpoint and global error codes\n if _context.response.status_code == 400\n raise APIException.new 'Unexpected error in API call. See HTTP response body for details.', _context\n elsif _context.response.status_code == 401\n raise APIException.new '', _context\n end\n validate_response(_context)\n\n # return appropriate response type\n decoded = APIHelper.json_deserialize(_context.response.raw_body)\n return GetVerificationStateMessageResponseModel.from_hash(decoded)\n end",
"def initialize(raw_hash)\n if valid_hash?(raw_hash)\n self.replace(raw_hash)\n @version, @cost, @salt, @checksum = split_hash(self)\n else\n raise Errors::InvalidHash.new(\"invalid hash\")\n end\n end",
"def initialize(raw_hash)\n if valid_hash?(raw_hash)\n self.replace(raw_hash)\n @version, @cost, @salt, @checksum = split_hash(self)\n else\n raise Errors::InvalidHash.new(\"invalid hash\")\n end\n end",
"def unpack_body(body)\n # fixed-length fields\n self.user_msg_len = body.slice!(0..1)\n self.data_len = body.slice!(0..1)\n self.flags = body.slice!(0)\n \n # variable-length fields\n @user_msg = body.slice!(0..(@user_msg_len - 1)) if (@user_msg_len != 0)\n @data = body.slice!(0..(@data_len - 1)) if (@data_len != 0)\n\n return(nil)\n end",
"def process_soap_parameters\n envelope = (request.env['wash_out.soap_data'] || {}).values_at(:envelope, :Envelope).compact.first || {}\n xml_data = (envelope.values_at(:body, :Body).compact.first || {}).with_indifferent_access\n if xml_data.key?(action_name)\n old_parameters = (request.headers[\"action_dispatch.request.parameters\"] || {}).with_indifferent_access\n data = old_parameters.merge(xml_data[action_name])\n request.headers['action_dispatch.request.parameters'] = data.with_indifferent_access\n end\n end",
"def verify_compact_signature(signature, hash)\n raise BTCError, \"Not implemented\"\n end",
"def parse(body)\n xml = Nokogiri::XML(body)\n {\n :unique_id => xml.xpath('//Response/Transaction/Identification/UniqueID').text,\n :status => translate_status_code(xml.xpath('//Response/Transaction/Processing/Status/@code').text),\n :reason => translate_status_code(xml.xpath('//Response/Transaction/Processing/Reason/@code').text),\n :message => xml.xpath('//Response/Transaction/Processing/Return').text\n }\n end",
"def update(para)\n if para.kind_of? Hash\n from_hash(para)\n end\n\nend",
"def create(body)\n raise ArgumentError unless body.is_a?(Hash)\n api.post('', body, 'application/json')\n end",
"def validate_query_hash(query, hash)\n validate_query(query)\n validate_hash(hash)\n end",
"def valid_integrity?(body, headers)\n request_signature = headers['X-Hub-Signature']\n signature_parts = request_signature.split('sha1=')\n request_signature = signature_parts[1]\n calculated_signature = OpenSSL::HMAC.hexdigest('sha1', @secret, body)\n calculated_signature == request_signature\n end",
"def initialize(body)\n @body = body\n end",
"def initialize(body)\n @body = body\n end",
"def initialize(body)\n @body = body\n end",
"def initialize(body)\n @body = body\n end",
"def consume_hash(subject, hash)\n result = @argument_list.inject({}) do |result, arg|\n result.merge arg.consume_hash(subject, hash)\n end\n unless @name.nil?\n result[@name] = parse(subject, hash[@name])\n end\n return result\n end",
"def consume_hash(subject, hash)\n result = @argument_list.inject({}) do |result, arg|\n result.merge arg.consume_hash(subject, hash)\n end\n unless @name.nil?\n result[@name] = parse(subject, hash[@name])\n end\n return result\n end",
"def load_from_hash!(hash)\n hash.each do |k, v|\n m = (k.to_s + '=').to_sym\n raise InvalidRequestError, \"Invalid key #{k.inspect}\" unless respond_to?(m)\n send(m, v)\n end\n\n self\n end",
"def secure_hash\r\n params['secureHash']\r\n end",
"def set_token_from_hash(auth_hash, user_hash)\n self.update_attribute(:name, user_hash[:name]) if self.name.blank?\n self.update_attribute(:email, user_hash[:email]) if self.email.blank?\n token = self.authentications.find_or_initialize_by_provider_and_uid(auth_hash[:provider], auth_hash[:uid])\n token.update_attributes(\n :name => auth_hash[:name],\n :link => auth_hash[:link],\n :access_token => auth_hash[:token],\n :secret => auth_hash[:secret]\n )\n end",
"def set_checksums_by_ruby_hash(ruby_hash)\n self.update checksums: JSON.dump(ruby_hash)\n end",
"def initialize(raw_hash)\n if valid_hash?(raw_hash)\n self.replace(raw_hash)\n @cost, @salt, @digest = split_hash(self.to_s)\n else\n raise Errors::InvalidHash.new(\"invalid hash\")\n end\n end",
"def validate_update(body, headers)\n unless @secret\n raise AppSecretNotDefinedError, \"You must init RealtimeUpdates with your app secret in order to validate updates\"\n end\n\n request_signature = headers['X-Hub-Signature'] || headers['HTTP_X_HUB_SIGNATURE']\n return unless request_signature\n\n signature_parts = request_signature.split(\"sha1=\")\n request_signature = signature_parts[1]\n calculated_signature = OpenSSL::HMAC.hexdigest('sha1', @secret, body)\n calculated_signature == request_signature\n end",
"def passive\t\t\n\tif @body.size > 4\n\t\thash=Digest::MD5.hexdigest(@body[0..500])\t\n\t\t[{:name=>\"hash\",:string=>hash}]\n\telse\n\t\t[]\n\tend\nend",
"def create(body)\n raise ArgumentError unless body.is_a?(Hash)\n\n api.post('', body, 'application/json')\n end",
"def create(body)\n raise ArgumentError unless body.is_a?(Hash)\n\n api.post('', body, 'application/json')\n end",
"def create(body)\n raise ArgumentError unless body.is_a?(Hash)\n\n api.post('', body, 'application/json')\n end",
"def create(body)\n raise ArgumentError unless body.is_a?(Hash)\n\n api.post('', body, 'application/json')\n end",
"def parse_body(buffer)\n return if buffer.bytesize == body_length\n\n raise ProtocolException, \"Failed to parse packet - input buffer (#{buffer.bytesize}) is not the same as the body length header (#{body_length})\"\n end",
"def compact_signature(hash)\n raise BTCError, \"Not implemented\"\n end",
"def post_params=(new_param_hash_or_str)\n # First see if this is a body payload\n if !new_param_hash_or_str.kind_of?(Hash)\n compose_verbatim_payload(new_param_hash_or_str)\n # then check if anything in the new param hash resembles an uplaod\n elsif extract_values(new_param_hash_or_str).any?{|value| value.respond_to?(:original_filename) }\n compose_multipart_params(new_param_hash_or_str)\n else\n compose_urlencoded_params(new_param_hash_or_str)\n end\n end",
"def update(hash); end",
"def createPrinterReceivedEmailBody(order_hash)\n\t\tbody = <<EOM\t\nOrderNum: #{order_hash[:order_number]}<br/>\nShipMethod: #{order_hash[:ship_option]}<br/>\nShipToFirstName: #{order_hash[:ship_to_first_name]}<br/>\nShipToLastName: #{order_hash[:ship_to_last_name]}<br/>\nShipToCompany: #{order_hash[:ship_to_company]}<br/>\nShipToAddr1: #{order_hash[:ship_to_addr1]}<br/>\nShipToAddr2: #{order_hash[:ship_to_addr2]}<br/>\nShipToCity: #{order_hash[:ship_to_city]}<br/>\nShipToState: #{order_hash[:ship_to_state]}<br/>\nShipToZip: #{order_hash[:ship_to_zip]}<br/>\nShipToPhone: #{order_hash[:ship_to_phone]}<br/>\n<br/>\nOrder Items:<br/>\nEOM\n\n\t\tbody\n\tend",
"def check(hash)\n # not implemented\n end",
"def initialize(hash)\n raise \"hash should be a Hash object #{hash.to_s}\" unless hash.is_a? Hash\n @hash = hash\n end",
"def body_hash_required?\n false\n end",
"def valid_block_for_hash(hash)\n if !Toshi::Models::Block.main_or_side_branch.where(hsh: hash).first\n return nil\n end\n stored_block = Toshi::Models::RawBlock.where(hsh: hash).first\n if !stored_block\n return nil\n end\n Bitcoin::Protocol::Block.new(stored_block.payload)\n end",
"def initialize(body = \"\", payload = {})\n\t\tself.body = body\n\t\tpayload.each do |attr, val|\n\t\t\tsend(\"#{attr}=\", val) if attributes.has_key?(attr.to_s)\n\t\tend\n\tend",
"def makehash(body)\n signer = SSH::Key::Signer.new\n if @config.pluginconf[\"sshkey\"]\n signer.add_key_file(@config.pluginconf[\"sshkey\"])\n signer.use_agent = false\n end\n signatures = signer.sign(body).collect { |s| s.signature }\n return Marshal.dump(signatures)\n end",
"def outgoing_body_filter(attr_hash)\n attr_hash\n end",
"def parse_body\n Nori.new.parse(body)['Response']\n end",
"def request_body! body\n enum_of_body(body).each do |body_part|\n body_part = body_part.to_s\n rv = Wrapper.msc_append_request_body txn_ptr, (strptr body_part), body_part.bytesize\n rv == 1 or raise Error, \"msc_append_request_body failed for #{truncate_inspect body_part}\"\n end\n\n # This MUST be called, otherwise rules aren't triggered.\n rv = Wrapper.msc_process_request_body txn_ptr\n rv == 1 or raise Error, \"msc_process_request_body failed\"\n\n intervention!\n end",
"def handle_message(message_hash)\n socket_id = message_hash[\"socket_id\"]\n message = message_hash[\"message\"]\n type = message[\"type\"]\n\n post(send(type, message_hash))\n end",
"def merge_params_from_body(_params = params)\n _params = _params.dup\n if request.media_type == 'application/json'\n request.body.rewind\n body_contents = request.body.read\n logger.debug { \"Parsing: '#{body_contents}'\" }\n if body_contents\n json_params = JSON.parse(body_contents)\n if json_params.is_a?(Hash)\n #json_params = indifferent_hash.merge(json_params)\n _params = json_params.merge(_params)\n else\n _params['body'] = json_params\n end\n end\n end\n indifferent_hash.merge(_params)\n end",
"def unpack_body(body)\n # fixed-length fields\n self.server_msg_len = body.slice!(0..1)\n self.data_len = body.slice!(0..1)\n self.status = body.slice!(0)\n \n # variable-length fields\n @server_msg = body.slice!(0..(@server_msg_len - 1)) if (@server_msg_len != 0)\n @data = body.slice!(0..(@data_len - 1)) if (@data_len != 0)\n\n return(nil)\n end",
"def incoming_body_filter(attr_hash)\n attr_hash\n end",
"def check_process_builder_payload\r\n xml = %Q{<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\r\n <soapenv:Header>\r\n <work:WorkContext xmlns:work=\"http://bea.com/2004/06/soap/workarea/\">\r\n <java version=\"1.8\" class=\"java.beans.XMLDecoder\">\r\n <void id=\"url\" class=\"java.net.URL\">\r\n <string>#{get_uri.encode(xml: :text)}</string>\r\n </void>\r\n <void idref=\"url\">\r\n <void id=\"stream\" method = \"openStream\" />\r\n </void>\r\n </java>\r\n </work:WorkContext>\r\n </soapenv:Header>\r\n <soapenv:Body/>\r\n</soapenv:Envelope>}\r\n end",
"def body_parameters\n p = Hash.new\n p\n end",
"def update(hash)\n @unsigned_query = nil\n\n hash.each do |key, val|\n # Syntactic sugar: Camelize symbol keys.\n if key.is_a? Symbol\n key = key.to_s.split('_').map(&:capitalize).join\n end\n @params[key] = val\n end\n\n self\n end",
"def unpack_body(body)\n # fixed fields\n self.status = body.slice!(0)\n self.flags = body.slice!(0)\n self.server_msg_len = body.slice!(0..1)\n self.data_len = body.slice!(0..1) \n \n # variable-length fields\n self.server_msg = body.slice!(0..(@server_msg_len - 1)) if (@server_msg_len != 0)\n self.data = body.slice!(0..(@data_len - 1)) if (@data_len != 0)\n return(nil)\n end",
"def call(web_method, params) #:nodoc:\n @error = nil\n @savon.call(\n web_method,\n message: params,\n soap_header: { 'ns1:AuthenticationHeader' => @auth.signature }\n ).to_hash\n rescue Exception => e\n @error = e\n @logger.log(e) if @logger\n nil\n end",
"def initialize(headers, body)\n @headers = headers\n @body = body\n end",
"def initialize(input_hash)\n unless input_hash.is_a? Hash\n raise ValidationError, \"Parameter `input_hash` supplied to #{self.class} must be of type Hash (#{input_hash.class}: #{input_hash.inspect})!\"\n end\n super input_hash\n validate!\n end",
"def initialize(hash = {})\n @service_api_key = extract_value(hash, :serviceApiKey)\n @id = extract_value(hash, :id)\n @password = extract_value(hash, :password)\n @sns = extract_value(hash, :sns)\n @access_token = extract_value(hash, :accessToken)\n @refresh_token = extract_value(hash, :refreshToken)\n @expires_in = extract_integer_value(hash, :expiresIn)\n @raw_token_response = extract_value(hash, :rawTokenResponse)\n end",
"def initialize(content)\n document = REXML::Document.new content\n node = REXML::XPath.first document, '//soapenv:Body'\n @parsed = SoapResponse.parse node\n end",
"def initialize(raw_hash)\n raise Errors::InvalidHash, 'invalid hash' unless valid_hash?(raw_hash)\n\n replace(raw_hash)\n\n @cost, @salt, @digest = split_hash(to_s)\n end",
"def initialize(hash = {})\n @timestamp = hash['L_TIMESTAMP'] || nil\n @timezone = hash['L_TIMEZONE'] || nil\n @type = hash['L_TYPE'] || nil\n @email = hash['L_EMAIL'] || nil\n @name = hash['L_NAME'] || nil\n @transaction_id = hash['L_TRANSACTIONID'] || nil\n @status = hash['L_STATUS'] || nil\n @amount = (hash['L_AMT'] || nil).to_f\n @currency_code = hash['L_CURRENCYCODE'] || nil\n @fee_amount = (hash['L_FEEAMT'] || nil).to_f\n @net_amount = (hash['L_NETAMT'] || nil).to_f\n end",
"def require_hash!\n @obj.is_a?(Hash) || handle_error(nil, :invalid_type, \"expected hash object in #{param_name(nil)} but received array object\")\n end",
"def add_service_from_auth_hash(auth_hash)\n self.email = auth_hash.info.email if self.email.blank?\n self.send(\"#{auth_hash.provider}_user_id=\", auth_hash.uid)\n self.send(\"#{auth_hash.provider}_token=\", auth_hash.credentials.token)\n self.send(\"#{auth_hash.provider}_secret=\", auth_hash.credentials.secret)\n self.save\n self.send(\"process_#{auth_hash.provider}_queue\")\n end"
] | [
"0.5992388",
"0.5599322",
"0.55401725",
"0.52851087",
"0.51988",
"0.5166695",
"0.5096424",
"0.5090427",
"0.50637686",
"0.50632364",
"0.5051357",
"0.50453806",
"0.50090665",
"0.49846715",
"0.4972368",
"0.49688593",
"0.49623933",
"0.49608627",
"0.49439153",
"0.49319795",
"0.49298537",
"0.4926106",
"0.4926106",
"0.4926106",
"0.4915951",
"0.4910248",
"0.49054572",
"0.48990318",
"0.48990318",
"0.48986048",
"0.48888868",
"0.4886438",
"0.4872288",
"0.48492953",
"0.48492953",
"0.48482668",
"0.48324054",
"0.48195013",
"0.4805575",
"0.47900224",
"0.47869322",
"0.47766733",
"0.47718063",
"0.4771655",
"0.4760008",
"0.4751207",
"0.47498655",
"0.474012",
"0.47390655",
"0.47310495",
"0.46935484",
"0.46909913",
"0.46883437",
"0.46883437",
"0.46883437",
"0.46883437",
"0.46866468",
"0.46866468",
"0.46819514",
"0.4681121",
"0.4678729",
"0.46720833",
"0.46716535",
"0.46710885",
"0.46695688",
"0.46674475",
"0.46674475",
"0.46674475",
"0.4667136",
"0.4651738",
"0.4646604",
"0.4644005",
"0.46430162",
"0.46341527",
"0.46318394",
"0.46299067",
"0.46128154",
"0.45989478",
"0.459342",
"0.45910376",
"0.4586072",
"0.45837936",
"0.45818853",
"0.45818576",
"0.4573405",
"0.45577317",
"0.45525464",
"0.45523703",
"0.4549799",
"0.4545959",
"0.454496",
"0.45418817",
"0.45376953",
"0.45348132",
"0.4529339",
"0.45287645",
"0.4521235",
"0.45165792",
"0.45131266",
"0.45112908"
] | 0.585774 | 1 |
Sets up HTTPI to return a given +response+. | def returns(response = nil)
http = { :code => 200, :headers => {}, :body => "" }
case response
when Symbol then http[:body] = Fixture[soap_action, response]
when Hash then http.merge! response
end
httpi_mock.returns HTTPI::Response.new(http[:code], http[:headers], http[:body])
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def response=(response); @response = response end",
"def handle_response(response); end",
"def handle_response(response); end",
"def prepare_response(response)\n response\n end",
"def setup\n @response ||= {}\n end",
"def set_response(response, body, status = 200)\n raise NotImplementedError\n end",
"def set_response(response, body, status = 200)\n raise NotImplementedError\n end",
"def fill_header(response); end",
"def set_http_response(response, skip_body=false)\n @data[:response][:instance] = HTTPResponse.new(\n response.code,\n skip_body ? nil : response.body,\n response.to_hash,\n response\n )\n nil\n end",
"def initialize(response)\n @response = response\n end",
"def initialize(response)\n @response = response\n end",
"def initialize(response)\n @response = response\n end",
"def initialize(response)\n @response = response\n end",
"def initialize(response, path, options = {})\n\n # puts \"Init NG:Response , original response : #{response}\"\n # headers\n # @fields = response.get_fields\n # @header = response.to_hash\n @response = response\n puts @response\n puts \"\"\n\n #status\n @code = response.code\n puts @code\n puts \"\"\n # @message = response.message\n @response_name = response.class.name\n\n # @cookie = response.get_fields('set-cookie')\n\n #body\n if( response.body[0] != '{' && response.body[0] != '[')\n #puts \"no wrapper\"\n @body = response.body\n\n else\n # puts \"wrapper : #{response.body[0]}\"\n @body = JSON.parse(response.body)\n end\n\n @path = path\n end",
"def initialize(response)\n @response = response\n end",
"def response_handler( request, response )\n return response if response.scope.out? || !response.text? ||\n response.code == 304\n\n if ignore_responses?\n page = Page.from_data(\n url: response.url,\n response: response.to_h\n )\n else\n page = response.to_page\n end\n\n page = update_forms( page, request, response )\n\n print_info \" * #{page.forms.size} forms\"\n print_info \" * #{page.links.size} links\"\n print_info \" * #{page.cookies.size} cookies\"\n print_info \" * #{page.jsons.size} JSON\"\n print_info \" * #{page.xmls.size} XML\"\n\n @pages << page.dup\n end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response #:nodoc:\n warn \"#{caller(1)[0]}: warning: HTTPResponse#response is obsolete\" if $VERBOSE\n self\n end",
"def initialize(response)\n @response = response\n @headers = response.headers\n @status = response.status\n @body = response.body || ''\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def response_parser; end",
"def response; return @response end",
"def set_response\n @response = Response.find_by_id(params[:id])\n end",
"def got_response(response)\n end",
"def http_response(response)\n status_code, headers, body = response\n http_response = status_line(status_code)\n DefaultResponseHeaders.merge(headers).each do |k,v|\n http_response << \"#{k}: #{v}\\r\\n\"\n end\n http_response << \"\\r\\n\"\n body.each do |s|\n http_response << s\n end\n http_response\n end",
"def initialize(response)\n @status = response.code.to_i\n @headers = response.each_header { |_, _| }\n @body = extract_body(response.body, *headers[Futile::Headers::CONTENT_ENCODING])\n @status = response.code.to_i\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def set_response\n @response = Response.find(params[:id])\n end",
"def initialize(response, request)\n @response = response\n @request = request\n end",
"def setup_response\n begin\n @response = @interceptor.intercept(request: @request) # TODO: ignored_routes, matched_routes, robots_json, & check_static_files options\n rescue SnapSearch::Exception => exception\n @config.on_exception.nil? ? raise(exception) : @config.on_exception.call(exception)\n end\n end",
"def wrap_response(response)\n TinyProxy::Response.new(response.to_hash,\n response.body,\n response.code,\n response.msg)\n end",
"def call_reponse_wrapper(response)\n response_wrapper.new(response)\n end",
"def do_request(request, response)\n body = make_request(request)\n\n # Always 200. A simplification, but fine for user\n # error messages.\n response.status = 200\n response['Content-Type'] = 'text/html' \n response.body = body\n end",
"def create_response(request)\n response = Response.new\n end",
"def handle\n @response.response_code = response_code\n @response.content = view\n @response.layout = set_layout\n @response.output_format = @request.path_info.split(\".\").last\n end",
"def response\n\t\t@response\n\tend",
"def response(&_)\n if block_given?\n @response ||= Response.new\n yield @response\n end\n @response\n end",
"def convert_response(response)\r\n HttpResponse.new(response.status, response.headers, response.body)\r\n end",
"def initialize(response, to_s_sym=nil)\n\t\t\tresponse = response.body if Net::HTTPResponse === response\n\t\t\t@body = response\n\t\t\t@to_s_sym = to_s_sym\n\t\tend",
"def initialize(http_response, request)\n @http_response = http_response\n @request = request\n end",
"def wrap_response(response)\n\t\t\t\t\tif body = response.body\n\t\t\t\t\t\tresponse.body = parser_for(response).new(body)\n\t\t\t\t\tend\n\t\t\t\t\t\n\t\t\t\t\treturn response\n\t\t\t\tend",
"def api_response\n response api_html(to_markup)\n end",
"def call\n with(response: response)\n end",
"def initialize(response)\n raise ArgumentError, \"Response doesnot exist\" if ((response.nil? || response.empty?) && response.code.nil? && response.code.empty?)\n @_response = response\n @_statusCode = response.code\n @_responseBody = response.body\n end",
"def fill_header response\n @response = Mechanize::Headers.new\n\n response.each { |k,v|\n @response[k] = v\n } if response\n\n @response\n end",
"def set(request, response)\n table.insert(\n path: request.path,\n auth: request.headers['Authorization'],\n params_hash: params_hash(request),\n created_at: Time.now,\n response_body: response.body)\n response\n end",
"def response_metadata=(_); end",
"def response\n @reponse ||= website_overview_builder.response\n rescue => e\n Rails.logger.error(e)\n nil\n end",
"def perform(request, response); end",
"def initialize(request, response)\n @request = request\n @response = response\n end",
"def parse_response!; end",
"def response(arguments = {})\n Response.new(self, arguments)\n end",
"def perform\n unless (response = session.last_response)\n raise HTTY::NoResponseError\n end\n show_response response\n self\n end",
"def initialize(response)\n self.savon = response\n self.body = response.body\n self.http_code = response.http.code\n # TODO: handle errors here\n end",
"def response #:nodoc:\n warn \"Net::HTTPResponse#response is obsolete\", uplevel: 1 if $VERBOSE\n self\n end",
"def handle_response(response)\n case response\n when Net::HTTPRedirection\n options[:limit] -= 1\n self.path = response['location']\n @redirect = true\n aggregate_cookies_and_perform(response)\n else\n parsed_response = parse_response(response.body)\n Response.new(parsed_response, response.body, response.code, response.message, response.to_hash)\n end\n end",
"def response\n @response ||= Rack::Response.new\n end",
"def adapt_response(response)\n formatted = Lyg::HttpResponse.new(response.code)\n\n response.raw_headers.each do |key, value|\n formatted.headers[key] = value\n end\n\n response.cookies.each do |key, value|\n formatted.cookies[key] = value\n end\n\n formatted.content = response.body\n return formatted\n end",
"def initialize(response)\n self.response_dup = response\n if response.body.is_a?(Hash) and !response.body[PARAM_PAGE].nil?\n self.first = path_for_page(FIRST_PAGE_NUMBER)\n self.next = response.body[META_NEXT] unless response.body\n self.prev = response.body[META_PREV]\n end\n end",
"def process_page(t, response, count)\n msg = \"[#{\"%.3d\" % count}/#{\"%.3d\" % datastore['MAX_PAGES']}] #{response.code || \"ERR\"} - #{t[:host]} - #{response.request.url}\"\n case response.code\n when 301,302\n if response.headers and response.headers[\"location\"]\n print_status(msg + \" -> \" + response.headers[\"location\"].to_s)\n else\n print_status(msg)\n end\n when 500...599\n print_good(msg)\n when 401\n print_good(msg)\n print_good((\" \" * 24) + \"WWW-Authenticate: #{response.headers['WWW-Authenticate']}\")\n when 200\n print_status(msg)\n when 404\n print_error(msg)\n else\n print_error(msg)\n end\n\n #\n # Process the web page\n #\n uri = URI(response.request.url)\n info = {\n :web_site => t[:site],\n :path => uri.path,\n :query => uri.query,\n :code => response.code,\n :body => response.body,\n :headers => response.headers\n }\n\n if response.headers['content-type']\n info[:ctype] = response.headers['content-type']\n end\n\n # TODO\n #if !page.cookies.empty?\n # info[:cookie] = page.cookies\n #end\n\n if response.headers['authorization']\n info[:auth] = response.headers['authorization']\n end\n\n if response.headers['location']\n info[:location] = response.headers['location']\n end\n\n if response.headers['last-modified']\n info[:mtime] = response.headers['last-modified']\n end\n\n # Report the web page to the database\n report_web_page(info)\n\n # Only process interesting response codes\n return unless [302, 301, 200, 500, 401, 403, 404].include?(response.code)\n\n # Skip certain types of forms right off the bat\n\n # Apache multiview directories\n return if uri.query =~ /^C=[A-Z];O=/ # Apache\n\n forms = []\n form_template = { :web_site => t[:site] }\n\n if form = form_from_url(t[:site], response.request.url)\n forms << form\n end\n\n doc = Nokogiri::HTML(response.body) if response.body rescue nil\n if doc\n doc.css(\"form\").each do |f|\n target = uri\n\n if f['action'] and not f['action'].strip.empty?\n action = f['action']\n\n # Prepend relative URLs with the current directory\n if action[0,1] != \"/\" and action !~ /\\:\\/\\//\n # Extract the base href first\n base = uri.path.gsub(/(.*\\/)[^\\/]+$/, \"\\\\1\")\n doc.css(\"base\").each do |bref|\n if bref['href']\n base = bref['href']\n end\n end\n action = (base + \"/\").sub(/\\/\\/$/, '/') + action\n end\n\n target = to_absolute(URI(action), uri) rescue next\n\n target = URI(target)\n unless target.host == uri.host\n # Replace 127.0.0.1 and non-qualified hostnames with our response.host\n # ex: http://localhost/url OR http://www01/url\n if (target.host.index(\".\").nil? 
or target.host == \"127.0.0.1\")\n target.host = uri.host\n else\n next\n end\n end\n end\n\n # skip this form if it matches exclusion criteria\n unless target.to_s =~ get_link_filter # TODO will need to filter more than this\n form = {}.merge!(form_template)\n form[:method] = (f['method'] || 'GET').upcase\n form[:query] = target.query.to_s if form[:method] != \"GET\"\n form[:path] = target.path\n form[:params] = []\n f.css('input', 'textarea').each do |inp|\n form[:params] << [inp['name'].to_s, inp['value'] || inp.content || '', { :type => inp['type'].to_s }]\n end\n\n f.css( 'select' ).each do |s|\n value = nil\n\n # iterate over each option to find the default value (if there is a selected one)\n s.children.each do |opt|\n ov = opt['value'] || opt.content\n value = ov if opt['selected']\n end\n\n # set the first one as the default value if we don't already have one\n value ||= s.children.first['value'] || s.children.first.content rescue ''\n\n form[:params] << [ s['name'].to_s, value.to_s, [ :type => 'select'] ]\n end\n\n forms << form\n end\n end\n end\n\n # Report each of the discovered forms\n forms.each do |form|\n next unless form[:method]\n print_status((\" \" * 24) + \"FORM: #{form[:method]} #{form[:path]}\")\n report_web_form(form)\n end\n end",
"def on_response &b\n @response_proc = b\n self\n end",
"def typhoeus_response\n @request.run\n end",
"def handle\n @response.content = :'index.html'\n end",
"def challenge(_request, _response)\n end",
"def response\n @response ||= request!(true)\n end",
"def response\n @page.response\n end",
"def set_response\n @response = Response.without_soft_destroyed.find(params[:id])\n end",
"def response(text, status = 200, headers = {})\n raise \"Cannot respond multiple times\" unless @response.nil?\n @response = Rack::Response.new([text].flatten, status, headers)\n end",
"def initialize(response, url, query_params = {})\n @response_status = response.status\n @response_body = response.body\n @response_headers = response.headers\n message = \"Status code #{response.status} on resource #{url}\"\n message += \" with params: #{query_params.inspect}\" if query_params.present?\n super(message)\n end",
"def request\n self.response = prepare_response(http_communication.content)\n end",
"def response\n @response ||= Net::HTTP.new(uri.host).request(request)\n end",
"def send_response\r\n if self.response.class.name == \"Proc\"\r\n return self.response.call\r\n end\r\n self.response\r\n end",
"def raw_response; end",
"def call\n conn = http_setup\n res = set_up_response(method.to_sym, uri, conn, headers ,body)\n\n response = HttpResponse.new(res)\n response.uri = uri\n raise response.error if !response.success? && !@has_retry_filter\n response\n end",
"def response=(_arg0); end",
"def response=(_arg0); end",
"def response=(_arg0); end"
] | [
"0.6912565",
"0.68331957",
"0.68331957",
"0.67839104",
"0.67219913",
"0.66436416",
"0.66436416",
"0.6630335",
"0.6616575",
"0.64495313",
"0.64495313",
"0.64495313",
"0.64495313",
"0.6409897",
"0.6359536",
"0.6346745",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.62924343",
"0.6283786",
"0.6281952",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.627146",
"0.6239853",
"0.6214001",
"0.6211428",
"0.62067974",
"0.6144625",
"0.61179686",
"0.61022884",
"0.61022884",
"0.6087417",
"0.6081165",
"0.60514015",
"0.60436785",
"0.6042937",
"0.60353166",
"0.6026794",
"0.6021434",
"0.6004571",
"0.5982652",
"0.59601986",
"0.5957678",
"0.59410083",
"0.59377027",
"0.5929232",
"0.59256864",
"0.5908722",
"0.5903642",
"0.58634996",
"0.58600265",
"0.5855328",
"0.5844161",
"0.5824278",
"0.58211356",
"0.5820435",
"0.5813717",
"0.5810379",
"0.5786309",
"0.5765584",
"0.5751386",
"0.5750401",
"0.57470536",
"0.57375395",
"0.5736387",
"0.5736342",
"0.5727512",
"0.5727134",
"0.5716684",
"0.57097536",
"0.5706948",
"0.5699574",
"0.56991315",
"0.5693958",
"0.5692555",
"0.5691141",
"0.56868917",
"0.5686641",
"0.5686641",
"0.5686641"
] | 0.0 | -1 |
Sets up Savon to respond like there was a SOAP fault. | def raises_soap_fault
Savon::SOAP::Response.any_instance.expects(:soap_fault?).returns(true)
self
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def format_soap_faults\n fault = self.hash[:envelope][:body].delete(\"soap:Fault\") || self.hash[:envelope][:body].delete(:fault)\n self.errors << (fault[:faultstring] || fault[\"faultstring\"])\n self.valid = false\n end",
"def raise_errors\n message = 'FIXME: Need to parse http for response message'\n return self.format_soap_faults if savon.soap_fault?\n\n case http.code.to_i\n when 200\n return\n when 400\n raise BadRequest, \"(#{http.code}): #{message}\"\n when 401\n raise Unauthorized, \"(#{http.code}): #{message}\"\n when 403\n raise Forbidden, \"(#{http.code}): #{message}\"\n when 404\n raise NotFound, \"(#{http.code}): #{message}\"\n when 406\n raise NotAcceptable, \"(#{http.code}): #{message}\"\n when 500\n raise InternalServerError, \"Stamps.com had an internal error. (#{http.code}): #{message}\"\n when 502..503\n raise ServiceUnavailable, \"(#{http.code}): #{message}\"\n end\n end",
"def error(response)\n throw response.http_error? ? response.http_error : response.soap_fault\n end",
"def set_error(ex, backtrace=false)\n\n # set repsonse headers\n @status = 404;\n @header['content-type'] = \"text/html; charset=UTF-8\"\n\n # set response content\n @body = ''\n @body << <<-_end_of_html_\n \n <HTML>\n <HEAD>\n <TITLE>No page for you!</TITLE>\n\n <STYLE type=\"text/css\">\n BODY { font: 8pt/12pt verdana }\n H1 { font: 13pt/15pt verdana }\n H2 { font: 8pt/12pt verdana }\n A:link { color: black; text-decoration: none }\n A:visited { color: black; text-decoration: none }\n </STYLE>\n\n </HEAD><BODY>\n <TABLE width=500 border=0 cellspacing=10>\n <TR>\n <TD>\n\n <h1><a href=\"http://www.bindshell.net/tools/beef/\">These aren't the pages you're looking for</a></h1>\n \n </TD>\n </TR>\n </TABLE>\n </BODY>\n </HTML>\n \n _end_of_html_\n \n end",
"def setup_response\n begin\n @response = @interceptor.intercept(request: @request) # TODO: ignored_routes, matched_routes, robots_json, & check_static_files options\n rescue SnapSearch::Exception => exception\n @config.on_exception.nil? ? raise(exception) : @config.on_exception.call(exception)\n end\n end",
"def flex_error_handling\n response.headers['Status'] = interpret_status(200) if response.headers['Status'] == interpret_status(422)\n response.headers['Status'] = interpret_status(200) if response.headers['Status'] == interpret_status(201)\n end",
"def halt\n throw :halt, response\n end",
"def internal_error\n\t\tself.status = 500\n\t\tself.headers = {}\n\t\tself.content = [\"Internal error\"]\n\t\tself\n\tend",
"def finish_request\n if (400..499).include? response.code\n set_error_body(response.code)\n end\n end",
"def error!(status, message)\n request.halt status, {error: message}.to_json\n end",
"def error!(status, message)\n request.halt status, {error: message}.to_json\n end",
"def render_soap_error(message, options = {})\n @namespace = NAMESPACE\n soap_error_response = render_to_string :template => 'wash_with_soap/error', :status => 500,\n :locals => { :error_message => message }\n\n if options[:ws_security] == \"encrypt\" || options[:ws_security] == \"sign\" || options[:ws_security] == \"sign_encrypt\"\n soap_error_response = ws_security_apply(soap_error_response, options)\n end\n\n render :xml => soap_error_response\n end",
"def endpoint\n soap_message = Nokogiri::XML(request.body.read)\n process_soap_headers(soap_message)\n soap_body = extract_soap_body(soap_message)\n validate_soap_body(soap_body)\n # Attempt to determine the SOAP operation and process it\n self.send(soap_operation_to_method(soap_body), soap_body)\n rescue StandardError => e\n # If any exception was raised generate a SOAP fault, if there is no\n # fault_code present then default to fault_code Server (indicating the\n # message failed due to an error on the server)\n @fault_code = e.respond_to?(:fault_code) ? e.fault_code : \"Server\"\n @fault_string = e.message\n render :fault, :status => 500\n end",
"def response!\n return response if !response.errors?\n\n raise response.to_exception\n end",
"def set_soap_headers\n self.headers['Content-Type'] = \"text/xml;charset=utf-8\"\n self.headers['SOAPAction'] = \"\\\"\\\"\"\n return \"headers set to soap\"\n end",
"def exec\n # If nginx (pre-1.2) is used as a proxy server, and SabreDAV as an\n # origin, we must make sure we send back HTTP/1.0 if this was\n # requested.\n # This is mainly because nginx doesn't support Chunked Transfer\n # Encoding, and this forces the webserver SabreDAV is running on,\n # to buffer entire responses to calculate Content-Length.\n @http_response.http_version = @http_request.http_version\n\n # Setting the base url\n @http_request.base_url = base_uri\n invoke_method(@http_request, @http_response)\n rescue ::Exception => e # use Exception (without ::) for easier debugging\n begin\n emit('exception', [e])\n rescue\n end\n\n dom = LibXML::XML::Document.new\n\n error = LibXML::XML::Node.new('d:error')\n LibXML::XML::Namespace.new(error, 'd', 'DAV:')\n LibXML::XML::Namespace.new(error, 's', NS_SABREDAV)\n dom.root = error\n\n h = lambda do |v|\n CGI.escapeHTML(v)\n end\n\n if self.class.expose_version\n error << LibXML::XML::Node.new('s:sabredav-version', h.call(Version::VERSION))\n end\n\n error << LibXML::XML::Node.new('s:exception', h.call(e.class.to_s))\n error << LibXML::XML::Node.new('s:message', h.call(e.message))\n\n if @debug_exceptions\n backtrace_node = LibXML::XML::Node.new('s:backtrace')\n e.backtrace.each do |entry|\n backtrace_node << LibXML::XML::Node.new('s:entry', entry)\n end\n error << backtrace_node\n end\n\n if e.is_a?(Exception)\n http_code = e.http_code\n e.serialize(self, error)\n headers = e.http_headers(self)\n else\n http_code = 500\n headers = {}\n end\n\n headers['Content-Type'] = 'application/xml; charset=utf-8'\n @http_response.status = http_code\n @http_response.update_headers(headers)\n @http_response.body = dom.to_s\n sapi.send_response(@http_response)\n end",
"def set_error(status)\n error_response = Rack::Response.new\n error_response.status = status\n @error_response = error_response.finish {yield}\n end",
"def error!(status, message)\n response.status = status\n response[\"Content-Type\"] = \"application/json\"\n response.write({error: message}.to_json)\n request.halt\n end",
"def exception_response\n { ok: false, message: 'Error processing request' }\n end",
"def if_non_200_raise(exception_klass)\n @non_200_exception = exception_klass\n self\n end",
"def _render_soap(result, options)\n @namespace = NAMESPACE\n @operation = soap_action = request.env['wash_out.soap_action']\n action_spec = self.class.soap_actions[soap_action][:out].clone\n result = { 'value' => result } unless result.is_a? Hash\n result = HashWithIndifferentAccess.new(result)\n inject = lambda {|data, spec|\n spec.each do |param|\n if param.struct?\n inject.call(data[param.name], param.map)\n else\n param.value = data[param.name]\n end\n end\n }\n\n soap_response = render_to_string :template => 'wash_with_soap/response',\n :locals => { :result => inject.call(result, action_spec) }\n\n if options[:ws_security] == \"encrypt\" || options[:ws_security] == \"sign\" || options[:ws_security] == \"sign_encrypt\"\n soap_response = ws_security_apply(soap_response, options)\n end\n \n\n\n if is_exception?(soap_response)\n Rails.logger.error \"PHP_SCRIPT_ERROR #{ws_security_response}\"\n render :template => 'wash_with_soap/error', :status => 500,\n :locals => { :error_message => \"php_script_error\" }\n else\n render :xml => soap_response\n end\n end",
"def initialize(e)\n @fault = e\n e.to_hash.tap do |fault|\n fault_code = fault[:fault][:faultcode]\n fault_string = parse_fault(fault[:fault][:faultstring])\n super(compose_message('soap_error',\n message: fault_string,\n code: fault_code))\n end\n end",
"def fixup_response( response )\n\t\tresponse = super\n\n\t\t# Ensure the response is acceptable; if it isn't respond with the appropriate\n\t\t# status.\n\t\tunless response.acceptable?\n\t\t\tbody = self.make_not_acceptable_body( response )\n\t\t\tfinish_with( HTTP::NOT_ACCEPTABLE, body ) # throw\n\t\tend\n\n\t\treturn response\n\tend",
"def respond_bad_request; make_response(nil, false, 400, \"Bad Request\") end",
"def throw(code)\n status(*Http::Status.for_code(code))\n super :halt\n end",
"def stuff_999_response(env, err)\n env.tap do\n _1.reason_phrase = \"#{err.class} #{err.message}\"\n _1.response_body = ''\n _1.response_headers = Faraday::Utils::Headers.new\n _1.status = HTTPDisk::ERROR_STATUS\n end\n Faraday::Response.new(env)\n end",
"def respond_on s\n headers = { STATUS_KEY => @status.to_s }.merge @headers\n s.headers stringify_headers(headers)\n if String === @body\n s.data @body\n else\n stream.log :error, \"unexpected @body: #{caller[0]}\"\n end\n rescue ::HTTP2::Error::StreamClosed\n stream.log :warn, \"stream closed early by client\"\n end",
"def handle_errors(response)\n if response.soap_fault?\n exception = exception_for_soap_fault(response)\n raise exception\n end\n if response.http_error?\n raise AdsCommon::Errors::HttpError,\n \"HTTP Error occurred: %s\" % response.http_error\n end\n end",
"def serve_exception(_exception); end",
"def flash_in_response_headers\n if request.xhr?\n response.headers['X-Psap-Message-Type'] = 'error' unless flash['error'].blank?\n response.headers['X-Psap-Message-Type'] = 'success' unless flash['success'].blank?\n response.headers['X-Psap-Message'] = flash['error'] unless flash['error'].blank?\n response.headers['X-Psap-Message'] = flash['success'] unless flash['success'].blank?\n flash.clear unless @keep_flash\n end\n end",
"def give_X(response, status, mime_type, msg)\n\theaders = response.headers\n\tresponse.status =status\n\tresponse.body = [msg]\n\tresponse.headers[\"Content-Type\"] = mime_type\n\tresponse.headers[CONTENT_LENGTH] = response.body.join.length.to_s\n end",
"def soap_fault?\n soap_fault.present?\n end",
"def error code, body=nil\n code, body = 500, code if code.respond_to? :to_str\n @response.body = body unless body.nil?\n halt code\n end",
"def status_error\n @status = 500\n end",
"def returns(response = nil)\n http = case response\n when Symbol then { :body => Fixture[action, response] }\n when Hash then response\n end\n\n Savon.config.hooks.define(:spec_response, :soap_request) do |_, request|\n respond_with(http)\n end\n\n self\n end",
"def log_response\n log \"SOAP response (status #{@response.code}):\"\n log @response.body\n end",
"def init_response\n headers = SSE_HEADER.merge @headers\n @parser.headers stringify_headers(headers)\n rescue ::HTTP2::Error::StreamClosed\n @stream.log :warn, \"stream closed early by client\"\n end",
"def send_error(e, res)\n res.status = 500\n res['Content-Type'] = 'application/json'\n body = { code: -1, error: \"#{e.class}: #{e.message}\" }\n body[:backtrace] = e.backtrace\n res.body = @shell.data(body).json(@shell.indent)\n @shell.logger.warn(Impl.format_error(e))\n\tend",
"def error(code=500, body = nil)\n unless code.is_a?(Integer)\n body = code\n code = 500\n end\n\n response.status = code\n response.body = body if body\n halt\n end",
"def set_response(response, body, status = 200)\n raise NotImplementedError\n end",
"def set_response(response, body, status = 200)\n raise NotImplementedError\n end",
"def call(env)\n # catch(:halt) do\n # r = @_request\n # r.block_result(instance_exec(r, &block))\n # @_response.finish\n # end\n [200, { \"Content-Type\": \"text/plain\" }, \"Helo\"]\n end",
"def json_error_response\n # status must be assigned, ||= will often return 200\n self.status = 401\n self.content_type = \"application/json\"\n # have to include format_json here because custom error app\n # doesn't seem to call render json: as normal\n # so pretty param is ignored\n self.response_body = format_json(\n { errors: [{ status: status, detail: i18n_message }] },\n {} # options hash normally handled by render block\n )\n end",
"def soap_request(options = {})\n options = { error_response_codes: [404] }.merge options\n\n headers = { 'Content-Type' => CONTENT_TYPES[version] }\n if action = options.delete(:action)\n headers['SOAPAction'] = action\n elsif operation = options.delete(:operation)\n headers['SOAPAction'] = soap_action operation\n end\n\n if version == 1 && headers['SOAPAction'].blank?\n raise 'SOAPAction header value must be provided for SOAP 1.1'\n end\n\n request_options = { format: :xml, headers: headers }\n Response.new(version, perform_request(:post, endpoint, options.merge(request_options)), fault_builder).tap do |response|\n raise response.fault if response.fault?\n raise response.error if response.error?\n end\n end",
"def halt_on_error!(error_code = :bad_request)\n on_error { halt error_code }\n self\n end",
"def send_error(e, res)\n res.code = 500\n res['Content-Type'] = 'application/json'\n body = { code: -1, error: \"#{e.class}: #{e.message}\" }\n body[:backtrace] = e.backtrace\n res.body = @shell.data(body).json(@shell.indent)\n @shell.logger.warn(Impl.format_error(e))\n\tend",
"def fixup_response( response )\n\t\tif RubyProf.running?\n\t\t\tprofile = RubyProf.stop\n\n\t\t\tresponse.body.truncate( 0 )\n\t\t\tprinter = RubyProf::CallStackPrinter.new( profile )\n\t\t\tprinter.print( response.body, min_percent: 2 )\n\t\t\tresponse.content_type = 'text/html'\n\t\tend\n\n\t\tsuper\n\tend",
"def unrecognizedResponse( text )\n end",
"def foreign_server_failure\n [ 503, {'Content-Type'=>'text/plain', 'Content-Length' => '23'},\n ['Foreign server failure.'] ]\n end",
"def soap_fault?\n begin\n ns_prefix.each do |prefix|\n if @document.find(\"/#{prefix}:Envelope\").first\n return true\n end\n end\n return false\n rescue Exception => ex\n Rails.logger.error(ex)\n return false\n end\n end",
"def reset_response\n self.instance_variable_set(:@_response_body, nil)\n end",
"def run\n @response ||= build_response catch_halt{ @handler.run }\n end",
"def set_response!\n get_responding_methods\n self.responding_object = @responding_object\n if successful?\n self.status_message ||= (@success_message || \"Status: successful\")\n else\n self.status_message ||= (@failure_message || \"Status: unsuccessful\")\n end\n return true\n end",
"def respond_with(status_code)\n response.status = status_code\n response.write \"\"\n nil\n end",
"def error(x, status:200, type:\"request\", title:\"An error occurred\", message:\"\", args: [])\n x.res.status = status\n if App[:app_error][type.to_sym]\n App[:app_error][type.to_sym][:get][x, title, message, *args]\n else\n x << \"ERROR: #{title} - #{message}\"\n end\n end",
"def _handle_error(e)\n res = @_response\n res.send(:initialize)\n res.status = 500\n res = _roda_handle_route{handle_error(e)}\n begin\n _roda_after(res)\n rescue => e2\n if errors = env['rack.errors']\n errors.puts \"Error in after hook processing of error handler: #{e2.class}: #{e2.message}\"\n e2.backtrace.each{|line| errors.puts(line)}\n end\n end\n res\n end",
"def error_500\n if !request.xhr?\n render :template => \"errors/500\", :status => \"500 Internal Server Error\"\n else\n respond_to do |format|\n format.json {render :json => {\n :error => \"500\",\n }, :status => \"500\"}\n end\n end\n end",
"def render_error(options)\n error = options[:message] || env['sinatra.error'].message\n status = options[:status] || 400\n \n halt status, { 'Content-type' => 'application/json; charset=utf-8' }, error \n end",
"def XSSF_404(res)\n\t\t\t\tres['Access-Control-Allow-Origin']\t= '*'\n\t\t\t\tres.status = 404\n\t\t\tend",
"def render_xml_response \n @trust.update_attributes(:expires_at => Time.now.utc) if @trust && @trust.xml_expire? \n response.headers['CONTENT_TYPE'] = 'text/xml; charset=utf-8' \n response.headers['Content-Type'] = 'text/xml; charset=utf-8' \n render :text => \"<Response>#{@resp.headers['location'].gsub(/&/,'&')}</Response>\" \n end",
"def halt(new_status = nil, new_body = nil, new_headers = {})\n\t\t\tstatus new_status if new_status\n\t\t\tbody new_body || (default_body_of_nearest_route if body.empty?)\n\t\t\tresponse.headers.merge!(new_headers)\n\t\t\tthrow :halt\n\t\tend",
"def check_response_for_errors(response)\n # take no action if there are no errors\n return unless response.soap_fault? || response.http_error?\n\n response_hash = response.to_hash\n if response_hash[:fault] && response_hash[:fault][:detail]\n # we can parse the response, so check the specific error code\n error_code = response_hash[:fault][:detail][:code]\n\n # if we can recognize the specific code, dispatch the proper exception\n if ERROR_HANDLERS.has_key? error_code\n raise ERROR_HANDLERS[error_code], response_hash[:fault][:faultstring]\n end\n\n # otherwise return a well-defined error string with the code and error message\n raise UnknownError, \"#{response_hash[:fault][:detail][:code]} #{response_hash[:fault][:detail][:description]}: #{response_hash[:fault][:faultstring]}\"\n end\n\n # we can't parse the error, so just pass back the entire response body\n raise UnknownError, response.to_s\n end",
"def report(note, excpt)\n # handle non-ZAX errors, such as encoding, etc.\n @controller.expires_now\n @controller.head @response_code,\n x_error_details: 'Your request can not be completed.'\n _log_exception WARN,note,excpt\n end",
"def faulty(aReason)\n @failure_reason = aReason\n end",
"def dispatch(ews, soapmsg, opts)\n respmsg = post(soapmsg)\n @log.debug <<-EOF.gsub(/^ {6}/, '')\n Received SOAP Response:\n ----------------\n #{Nokogiri::XML(respmsg).to_xml}\n ----------------\n EOF\n opts[:raw_response] ? respmsg : ews.parse_soap_response(respmsg, opts)\n end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def response; end",
"def test_service_error_response\n twerr = Twirp::Error.invalid_argument('foo')\n resp = Twirp::Service.error_response(twerr)\n assert_equal 400, resp[0]\n assert_equal 'application/json', resp[1]['Content-Type']\n assert_equal '{\"code\":\"invalid_argument\",\"msg\":\"foo\"}', resp[2][0]\n end",
"def before_server_error(exception); end",
"def reset_response\n self.instance_variable_set(:@_response_body, nil)\n end",
"def populate_error resp\n code = resp.http_response.status\n if EMPTY_BODY_ERRORS.include?(code) and empty_response_body?(resp.http_response.body)\n error_class = EMPTY_BODY_ERRORS[code]\n resp.error = error_class.new(resp.http_request, resp.http_response)\n else\n super\n end\n end",
"def error(output)\n respond_with(Response::Failure, output)\n end",
"def error(output)\n respond_with(Response::Failure, output)\n end",
"def error\n \"#{self.fault_string} (#{self.fault_code})\" unless self.fault_code.blank?\n end",
"def alert(x, status:200, type:\"request\", title:\"Alert\", message:\"\", args: [])\n x.res.status = status\n App[:app_error][type.to_sym][:get][x, title, message, *args]\n end",
"def status_code\n 500 # This should not be called directly\n end",
"def status\n 500\n end",
"def process_response(response)\n @last_response = response.body\n\n logger.debug @last_response if logger.present?\n\n if response.body =~ /:Fault>/ then\n handle_error(response)\n else\n response.body\n end\n end",
"def four_twenty_two(e)\n json_response({ message: e.message}, :unprocessable_entity)\nend",
"def render_error\n respond_to do| wants|\n wants.html {render :to_xml => 'error.xml.builder', :layout => false, :status => @error.code }\n wants.json {render :to_json => 'error.xml.builder', :status => @error.code }\n wants.xml {render :to_xml => 'error.xml.builder', :layout => false, :status => @error.code }\n wants.yaml {render :to_yaml => 'error.xml.builder', :status => @error.code }\n end\n end",
"def error\n render plain: '500 Internal Server Error', status: :internal_server_error\n end",
"def response_code; end",
"def response_code; end",
"def response_code; end",
"def response_code; end",
"def not_found_response\n [ 404, { \"Content-Type\" => \"text/plain\", \"Content-Length\" => \"9\", \"X-Cascade\" => \"pass\" }, [ \"Not found\" ] ]\n end",
"def respond_with(status_code)\n response.status = status_code\n response.write ''\n nil\n end",
"def dispatch(soap_action)\n response = @request.soap @soap\n Response.new response\n end",
"def server_errors; end"
] | [
"0.614185",
"0.6041459",
"0.60386366",
"0.6015774",
"0.59535646",
"0.5841786",
"0.5819629",
"0.5807705",
"0.5764952",
"0.5753135",
"0.5753135",
"0.57317203",
"0.5716532",
"0.5706272",
"0.566745",
"0.5649483",
"0.5637122",
"0.5628838",
"0.5577762",
"0.5465068",
"0.54605436",
"0.5448289",
"0.54415166",
"0.54290676",
"0.5403287",
"0.538882",
"0.5385719",
"0.5380979",
"0.53583467",
"0.53415334",
"0.53139436",
"0.5310896",
"0.53093517",
"0.52890545",
"0.5284423",
"0.5283794",
"0.5274149",
"0.5269203",
"0.52580893",
"0.5255628",
"0.5255628",
"0.5255122",
"0.52408993",
"0.52273464",
"0.5209957",
"0.5193499",
"0.5191967",
"0.5189693",
"0.51881397",
"0.51835144",
"0.5178696",
"0.5169419",
"0.516594",
"0.5164882",
"0.5162846",
"0.5161562",
"0.51540387",
"0.51495856",
"0.51449996",
"0.5141657",
"0.513137",
"0.51282424",
"0.51205385",
"0.5112497",
"0.51104426",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.5098723",
"0.50967",
"0.50759864",
"0.50754386",
"0.50688815",
"0.50672704",
"0.50672704",
"0.50649273",
"0.50603604",
"0.50533307",
"0.5053029",
"0.5051468",
"0.5051236",
"0.505122",
"0.5048943",
"0.5044347",
"0.5044347",
"0.5044347",
"0.5044347",
"0.50306624",
"0.5027809",
"0.5026276",
"0.5015435"
] | 0.6792786 | 0 |
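The negative samples in the row above cluster around SOAP faults and plain HTTP error responses; several of them (foreign_server_failure, not_found_response) return the bare Rack triplet of status, headers, and body. A minimal sketch of that convention, assuming only the Rack calling contract (the method name below is hypothetical and does not appear in the dataset rows):

    # A bare Rack response triplet: [status, headers, body-enumerable].
    # Assumes only the Rack calling convention; no framework is required.
    def internal_error_response
      body = 'Internal server error.'
      [500,
       { 'Content-Type' => 'text/plain', 'Content-Length' => body.bytesize.to_s },
       [body]]
    end

Any Rack-compatible server can return this value directly; framework helpers seen in the samples above (halt, error, respond_with) build the same triplet behind the scenes.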
Wrap tests that use Mocha and skip if unavailable. | def uses_mocha(test_name)
require 'mocha' unless Object.const_defined?(:Mocha)
rescue LoadError => load_error
$stderr.puts "Skipping #{test_name} tests. `gem install mocha` and try again."
else
yield
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def uses_mocha(test_name)\n require 'mocha'\n yield\nrescue LoadError\n $stderr.puts \"Skipping #{test_name} tests. `gem install mocha` and try again.\"\nend",
"def uses_mocha(test_name)\n unless Object.const_defined?(:Mocha)\n gem 'mocha', '>= 0.9.5'\n require 'mocha'\n end\nrescue LoadError => load_error\n $stderr.puts \"Skipping #{test_name} tests. `gem install mocha` and try again.\"\nelse\n yield\nend",
"def test\n return\n # TODO\n # skips test framework, but we can probably just bastardize the options in the same way as with :skip_bundle\n # either make `test` build the actual directories etc., or use a script\n # either way, this method is stupid.\n end",
"def test\n return\n # TODO\n # skips test framework, but we can probably just bastardize the options in the same way as with :skip_bundle\n # either make `test` build the actual directories etc., or use a script\n # either way, this method is stupid.\n end",
"def exit_skipping_test()\n Process.exit 99\nend",
"def uses_shoulda(&blk)\n begin\n require 'rubygems'\n require 'shoulda'\n yield\n rescue Gem::LoadError\n $stderr.puts \"Sorry, you need to install shoulda to run these tests: `gem install shoulda`\"\n end\nend",
"def test_hack\n assert(true)\n end",
"def test_should_be_setup\n assert false\n end",
"def test(what = nil)\n ActiveRecord::Base.logger.silence do\n case what\n when NilClass, :all\n rake \"test:units\"\n when String\n begin\n old_env, ENV[\"RAILS_ENV\"] = ENV[\"RAILS_ENV\"], \"test\"\n ENV['TEST'] = \"test/#{what}_test.rb\"\n rake \"test:single\"\n ensure\n ENV['TEST'] = nil\n ENV[\"RAILS_ENV\"] = old_env\n end\n end\n end\n nil\n rescue SystemExit\n nil\n end",
"def test_unit_not_dead\n end",
"def my_tests\n end",
"def test_nothing\n end",
"def test_legacy\n # Set up legacy handlers\n setup_legacy_handling\n\n common_tests\n end",
"def run(*args) #:nodoc:\n return if @method_name == \"default_test\"\n super\n end",
"def mocha_teardown\n Mockery.teardown\n end",
"def test_nothing\n end",
"def puke(klass, meth, err)\n case err\n when MiniTest::Skip\n @turn_test.skip!(err)\n turn_reporter.skip(err)\n when MiniTest::Assertion\n @turn_test.fail!(err)\n turn_reporter.fail(err)\n else\n @turn_test.error!(err)\n turn_reporter.error(err)\n end\n super(klass, meth, err)\n end",
"def build_test\n \n return if skip_method(__method__)\n \n build_test_fixtures\n build_test_helper\n build_functional_test\n build_integration_test\n build_unit_test\n end",
"def test?\n rspec? || minitest?\n end",
"def rspec?; ::ENV['TEST_RUNNER'].present?; end",
"def test_a_few_nonfunctional_cases\n run_test_as('wizard') do\n assert_equal 0, simplify(command(%Q|; fork (0); suspend(0); endfork|))\n send_string %Q|; kill_task(task_id());|\n end\n end",
"def test_unit_can_be_killed\n end",
"def test_case(target, &block)\n case target\n when Class\n $TEST_SUITE << Lemon::TestClass.new(:target=>target, &block)\n when Module\n $TEST_SUITE << Lemon::TestModule.new(:target=>target, &block)\n else\n if defined?(super)\n super(target, &block)\n else\n raise\n end\n end\n end",
"def add_testing\n setup_rspec\n setup_rspec_generators\n setup_rails_helper\n setup_factories_file\nend",
"def pass?\n chdir do\n setup_test_app\n run_tests\n end\n end",
"def test_disabled?\n ENV['NO_TEST'] == '1'\nend",
"def test_disabled?\n ENV['NO_TEST'] == '1'\nend",
"def skip_tests\n @skip_base_tests ||= false\n return ((self.class == ApplicationControllerTest) || @skip_base_tests)\n end",
"def test_nothing\n return true\n end",
"def disable_test_install\n Gem.use_paths(\"test/data/gemhome\")\n result = @remote_installer.install('foo')\n assert_equal [nil], result\n end",
"def __dummy_test__\n end",
"def run\n begin\n self.class.roby_should_run(self, Roby.app)\n super\n rescue MiniTest::Skip => e\n puke self.class, self.name, e\n end\n end",
"def test?\n rspec? || env == 'test'\n end",
"def add_working_test_case_with_adapter_tests\n add_working_test_case do |test_case|\n test_case.add_to_before_tests <<-EOT\n include AdapterTests::TestUnit\n EOT\n yield test_case if block_given?\n end\n end",
"def add_working_test_case_with_adapter_tests\n add_working_test_case do |test_case|\n test_case.add_to_before_tests <<-EOT\n include AdapterTests::RSpec\n EOT\n yield test_case if block_given?\n end\n end",
"def fails_on_jruby\n before do\n unless SpecConfig.instance.mri?\n skip \"Fails on jruby\"\n end\n end\n end",
"def fails_on_jruby\n before do\n unless SpecConfig.instance.mri?\n skip \"Fails on jruby\"\n end\n end\n end",
"def test_harness_dependencies(*)\n return unless platform[/n(5|6)k/]\n skip_if_nv_overlay_rejected(agent)\n\n # Vxlan has a hard requirement to disable feature fabricpath on n5/6k\n cmd = 'no feature-set fabricpath'\n command_config(agent, cmd, cmd)\nend",
"def self_test; end",
"def self_test; end",
"def fake!(&block)\n switch_test_mode(:fake, &block)\n end",
"def run\n begin\n time_it do\n self.class.roby_should_run(self, app)\n end\n rescue Minitest::Skip\n return Minitest::Result.from(self)\n end\n\n super\n end",
"def define_test_tasks\n default_tasks = []\n\n if File.directory? \"test\" then\n desc 'Run the test suite. Use FILTER or TESTOPTS to add flags/args.'\n task :test do\n ruby make_test_cmd\n end\n\n desc 'Run the test suite using multiruby.'\n task :multi do\n ruby make_test_cmd(:multi)\n end\n\n desc 'Show which test files fail when run alone.'\n task :test_deps do\n tests = Dir[\"test/**/test_*.rb\"] + Dir[\"test/**/*_test.rb\"]\n\n paths = ['bin', 'lib', 'test'].join(File::PATH_SEPARATOR)\n null_dev = Hoe::WINDOZE ? '> NUL 2>&1' : '&> /dev/null'\n\n tests.each do |test|\n if not system \"ruby -I#{paths} #{test} #{null_dev}\" then\n puts \"Dependency Issues: #{test}\"\n end\n end\n end\n\n default_tasks << :test\n end\n\n if File.directory? \"spec\" then\n begin\n require 'spec/rake/spectask'\n\n desc \"Run all specifications\"\n Spec::Rake::SpecTask.new(:spec) do |t|\n t.libs = self.rspec_dirs\n t.spec_opts = self.rspec_options\n end\n rescue LoadError\n # do nothing\n end\n default_tasks << :spec\n end\n\n desc 'Run the default task(s).'\n task :default => default_tasks\n\n desc 'Run ZenTest against the package.'\n task :audit do\n libs = %w(lib test ext).join(File::PATH_SEPARATOR)\n sh \"zentest -I=#{libs} #{spec.files.grep(/^(lib|test)/).join(' ')}\"\n end\n end",
"def setup_test_suite\n return unless config.dig(\"test_suite\") == \"rspec\"\n\n generate \"rspec:install\"\n run \"rm -r test\"\n end",
"def enable_test_interface\n extend Dry::Configurable::TestInterface\n end",
"def test_nothing; end",
"def test_test_mode\n set_env 'RACK_ENV', nil\n refute Config.test?\n set_env 'RACK_ENV', 'test'\n assert Config.test?\n end",
"def tests; end",
"def tests; end",
"def initialize_test\n self.multiruby_skip ||= []\n self.testlib ||= :minitest\n self.test_prelude ||= nil\n self.test_task = nil\n end",
"def skip_now\n @skipped = true\n raise TestSkip\n end",
"def run(result)\n return if @method_name.to_s == \"default_test\"\n\n if using_mocha = respond_to?(:mocha_verify)\n assertion_counter_klass = if defined?(Mocha::TestCaseAdapter::AssertionCounter)\n Mocha::TestCaseAdapter::AssertionCounter\n else\n Mocha::Integration::TestUnit::AssertionCounter\n end\n assertion_counter = assertion_counter_klass.new(result)\n end\n\n yield(Test::Unit::TestCase::STARTED, name)\n @_result = result\n begin\n begin\n run_callbacks :setup\n setup\n __send__(@method_name)\n mocha_verify(assertion_counter) if using_mocha\n rescue Mocha::ExpectationError => e\n add_failure(e.message, e.backtrace)\n rescue Test::Unit::AssertionFailedError => e\n add_failure(e.message, e.backtrace)\n rescue Exception => e\n raise if PASSTHROUGH_EXCEPTIONS.include?(e.class)\n add_error(e)\n ensure\n begin\n teardown\n run_callbacks :teardown, :enumerator => :reverse_each\n rescue Test::Unit::AssertionFailedError => e\n add_failure(e.message, e.backtrace)\n rescue Exception => e\n raise if PASSTHROUGH_EXCEPTIONS.include?(e.class)\n add_error(e)\n end\n end\n ensure\n mocha_teardown if using_mocha\n end\n result.add_run\n yield(Test::Unit::TestCase::FINISHED, name)\n end",
"def fails_intermittently(issue_link, args = {})\n raise ArgumentError, \"provide a Jira ticket link\" unless issue_link\n raise ArgumentError, \"a block is required\" unless block_given?\n\n yield\nrescue Minitest::Assertion, StandardError, SignalException # we have a test failure!\n STDERR.puts \"\\n\\nIntermittent test failure! See: #{issue_link}\"\n\n if args.empty?\n STDERR.puts \"No further debugging information available.\"\n else\n STDERR.puts \"Debugging information:\\n\"\n args.keys.sort.each do |key|\n STDERR.puts \"#{key} => #{args[key].inspect}\"\n end\n end\nend",
"def before_test(test); end",
"def before_test(test); end",
"def default_test; end",
"def skip_suite(suite, reason) # :nodoc:\n suite.skip(reason)\n reason = suite.reason\n suite.assertions.each do |test|\n test.skip(reason)\n end\n end",
"def setup\n # runs before every test\n # wipe and recreate .test directory, switch pwd \n Dir.chdir(@@start_dir)\n if File.exist?('.test')\n FileUtils.rm_rf('.test')\n end\n \n Dir.mkdir('.test')\n Dir.chdir('.test')\n end",
"def run_test\n # Add your code here...\n end",
"def run_test\n # Add your code here...\n end",
"def it desc = \"anonymous\", &block\n block ||= proc { skip \"(no tests defined)\" }\n\n @specs ||= 0\n @specs += 1\n\n name = \"test_%04d_%s\" % [ @specs, desc ]\n\n undef_klasses = self.children.reject { |c| c.public_method_defined? name }\n\n define_method name, &block\n\n undef_klasses.each do |undef_klass|\n undef_klass.send :undef_method, name\n end\n\n name\n end",
"def run_tests\n puts \"Running exactly #{@spec.size} tests.\"\n @spec.each do |test_case|\n sleep test_case.wait_before_request\n response = send_request_for(test_case)\n Checker.available_plugins.each do |plugin|\n result = @expectation.check(plugin, response, test_case)\n if not result.success?\n putc \"F\"\n @results << result\n break\n else\n if plugin == Checker.available_plugins.last\n @results << result\n putc \".\"\n end\n end\n end\n end\n end",
"def test *errs, &bk\n yield\n rescue Exception => ex\n return nil if errs.empty?\n errs.each do |klass|\n return nil if klass.instance_of?(Module) ? ex.kind_of?(klass) : ex.is_a?(klass)\n end\n raise\n end",
"def test_entry\n raise \"Implement this method in your test class\"\n end",
"def default_test\n end",
"def setup\n # wipe and recreate .test directory \n Dir.chdir(@@start_dir)\n if File.exist?('.test')\n FileUtils.rm_rf('.test')\n end\n \n # NOTE: I don't think we want to kill .repository in pwd from running test\n if File.exist?('.repository')\n FileUtils.rm_rf('.repository')\n end\n Dir.mkdir('.test')\n Dir.chdir('.test')\n end",
"def default_test\n end",
"def setup\n # Do nothing\n end",
"def setup\n # Do nothing\n end",
"def setup\n # Do nothing\n end",
"def setup\n # Do nothing\n end",
"def setup\n # Do nothing\n end",
"def setup\n # Do nothing\n end",
"def setup\n # Do nothing\n end",
"def test\n require File.expand_path(File.join(File.dirname(__FILE__), \"tests/tests\"))\n Test.run\nend",
"def run_test(skip_post_process, original_results_directory, run_coverage = true)\n puts ' ========================= Starting Run for Minitest (and coverage) ============================'\n # not sure what @base_dir has to be right now\n pre_process_minitest(original_results_directory)\n\n # Specify the minitest reporters\n require 'minitest/reporters'\n Minitest::Reporters.use! [\n Minitest::Reporters::HtmlReporter.new,\n Minitest::Reporters::JUnitReporter.new\n ]\n\n if run_coverage\n # Load in the coverage before loading the test files\n SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new(\n [\n SimpleCov::Formatter::HTMLFormatter\n ]\n )\n\n SimpleCov.start do\n # Track all files inside of @base_dir\n track_files \"#{@base_dir}/**/*.rb\"\n\n use_merging false\n\n # Exclude all files outside of @base_dir\n root_filter = nil\n add_filter do |src|\n root_filter ||= /\\A#{Regexp.escape(@base_dir + File::SEPARATOR)}/io\n src.filename !~ root_filter\n end\n end\n end\n\n num_tests = 0\n openstudio_version = OpenStudio::VersionString.new(OpenStudio.openStudioVersion)\n Dir[\"#{@base_dir}/**/*_Test.rb\", \"#{@base_dir}/**/*_test.rb\"].uniq.each do |file|\n file = File.expand_path(file)\n measure_dir = File.expand_path(File.join(File.dirname(file), '..'))\n\n # check measure xml\n compatible = {\n compatible: true,\n message: '',\n openstudio_version: openstudio_version.str,\n measure_min_version: 'None',\n measure_max_version: 'None',\n loaded: false,\n load_errors: []\n }\n begin\n measure = OpenStudio::BCLMeasure.new(measure_dir)\n compatible[:measure_name] = measure.className\n measure.files.each do |f|\n if f.fileName == 'measure.rb'\n if !f.minCompatibleVersion.empty?\n min_version = f.minCompatibleVersion.get\n compatible[:measure_min_version] = min_version.str\n if openstudio_version < min_version\n compatible[:compatible] = false\n compatible[:message] = \"OpenStudio Version #{openstudio_version.str} < Min Version #{min_version.str}\"\n end\n end\n if !f.maxCompatibleVersion.empty?\n max_version = f.maxCompatibleVersion.get\n compatible[:measure_max_version] = max_version.str\n if openstudio_version > max_version\n compatible[:compatible] = false\n compatible[:message] = \"OpenStudio Version #{openstudio_version.str} > Max Version #{max_version.str}\"\n end\n end\n end\n end\n rescue StandardError => e\n compatible[:compatible] = false\n compatible[:message] = e.message\n end\n\n if !compatible[:compatible]\n puts \"Measure not compatible: #{measure_dir}, #{compatible[:message]}\"\n next\n end\n\n # load test\n puts \"Loading file for testing: #{file}\"\n begin\n load file\n compatible[:loaded] = true\n num_tests += 1\n rescue StandardError, LoadError => e\n compatible[:load_errors] << e.message\n end\n\n ensure\n # Write out the compatibility\n # write out to a file that the measure is not applicable\n os_compatible_file = \"#{@base_dir}/test_results/minitest/compatibility/#{compatible[:measure_name]}.json\"\n puts os_compatible_file\n FileUtils.mkdir_p File.dirname(os_compatible_file) unless Dir.exist? 
File.dirname(os_compatible_file)\n File.open(os_compatible_file, 'w') do |f|\n f << JSON.pretty_generate(compatible)\n end\n end\n\n if num_tests < 1\n puts 'No tests found'\n\n if run_coverage\n # This doesn't seem to be working, it doesn't save off the .resultset.json.\n begin\n simplecov_exit_status = SimpleCov.end_now\n rescue NoMethodError\n # in case using some other version of SimpleCov\n SimpleCov.set_exit_exception\n exit_status = SimpleCov.exit_status_from_exception\n SimpleCov.result.format!\n simplecov_exit_status = SimpleCov.process_result(SimpleCov.result, exit_status)\n end\n end\n\n if skip_post_process\n return true\n else\n return post_process_results(original_results_directory)\n end\n else\n puts \"Inspected #{num_tests} tests\"\n end\n\n # Now call run on the loaded files. Note that the Minitest.autorun method has been nulled out in the\n # openstudio_measure_tester.rb file, so it will not run.\n begin\n Minitest.run ['--verbose']\n rescue StandardError => e\n puts\n puts '!!!!!!!!!!!!!!!!!!!!! Minitest Error Occurred !!!!!!!!!!!!!!!!!!!!!'\n puts e.message\n puts e.backtrace\n puts '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'\n puts\n end\n\n # Shutdown SimpleCov and collect results\n # This will set SimpleCov.running to false which will prevent from running again at_exit\n if run_coverage\n begin\n simplecov_exit_status = SimpleCov.end_now\n rescue NoMethodError\n # in case using some other version of SimpleCov\n SimpleCov.set_exit_exception\n exit_status = SimpleCov.exit_status_from_exception\n SimpleCov.result.format!\n simplecov_exit_status = SimpleCov.process_result(SimpleCov.result, exit_status)\n end\n end\n\n if skip_post_process\n return true\n else\n return post_process_results(original_results_directory)\n end\n end",
"def test_assert_raises_skip\n @assertion_count = 0\n\n assert_triggered \"skipped\", Minitest::Skip do\n @tc.assert_raises ArgumentError do\n begin\n raise \"blah\"\n rescue\n skip \"skipped\"\n end\n end\n end\n end",
"def process_test_cases\n raise NotImplementedError, 'You must implement this'\n end",
"def make_test_cmd multi = false # :nodoc:\n framework = SUPPORTED_TEST_FRAMEWORKS[testlib]\n raise \"unsupported test framework #{testlib}\" unless framework\n\n tests = [\"rubygems\", framework] +\n test_globs.map { |g| Dir.glob(g) }.flatten\n tests.map! {|f| %(require \"#{f}\")}\n\n cmd = \"#{Hoe::RUBY_FLAGS} -e '#{tests.join(\"; \")}' #{FILTER}\"\n\n if multi then\n ENV['EXCLUDED_VERSIONS'] = multiruby_skip.join \":\"\n cmd = \"-S multiruby #{cmd}\"\n end\n\n cmd\n end",
"def run_one_test(session)\n \n end",
"def define_test_tasks\n default_tasks = []\n\n task :test\n\n if File.directory? \"test\" then\n case testlib\n when :minitest then\n require \"minitest/test_task\" # in minitest 5.16+\n\n test_prelude = self.test_prelude\n self.test_task = Minitest::TestTask.create :test do |t|\n t.test_prelude = test_prelude\n t.libs.prepend Hoe.include_dirs.uniq\n end\n when :none then\n # do nothing\n else\n warn \"Unsupported? Moving to Minitest::TestTask. Let me know if you use this!\"\n end\n\n desc \"Run the test suite using multiruby.\"\n task :multi do\n skip = with_config do |config, _|\n config[\"multiruby_skip\"] + self.multiruby_skip\n end\n\n ENV[\"EXCLUDED_VERSIONS\"] = skip.join(\":\")\n system \"multiruby -S rake\"\n end\n\n default_tasks << :test\n end\n\n desc \"Run the default task(s).\"\n task :default => default_tasks\n\n desc \"Run ZenTest against the package.\"\n task :audit do\n libs = %w[lib test ext].join(File::PATH_SEPARATOR)\n sh \"zentest -I=#{libs} #{spec.files.grep(/^(lib|test)/).join(\" \")}\"\n end\n end",
"def skip(skip_msg = nil, called_from = nil)\n raise Result::TestSkipped, (skip_msg || \"\"), called_from\n end",
"def start_coverage\n return unless sporkless? # Something funny about numbers right now under spork\n require 'simplecov'\nend",
"def fail_now\n fail\n raise TestFail\n end",
"def pass_flaky_test(example)\n example.clear_exception\n add_to_summary(:quarantined_tests, example.id)\n end",
"def test(argv = ARGV)\n if spec_file?(argv) && defined?(RSpec)\n # disable autorun in case the user left it in spec_helper.rb\n RSpec::Core::Runner.disable_autorun!\n exit RSpec::Core::Runner.run(argv)\n else\n Zeus::M.run(argv)\n end\n end",
"def run_test_with(test_opts = {})\n opts = default_options.merge test_opts\n @browser = Watir::Browser.new browser_type, opts\n assert_http_proxy\n assert_https_proxy\n end",
"def test_bad_chicken_deps\n check_deps_fail BadChickenBall unless `/usr/bin/which csc`.chomp.empty?\n end",
"def abort_test!\n throw 'abort_test!'\n end",
"def test_multiple_subcribers_no_proxy\n command = \"cd #{dirs.tmpdir}; #{CONFIG_SOURCE}; #{get_java_command}\"\n multiple_subcribers_common(command, command, false)\n end",
"def test_should_only_inject_test_runner_mediator_for_lauching_dtr\n DTR.inject\n assert Test::Unit::UI::TestRunnerMediator.respond_to?(:reject_dtr)\n assert Test::Unit::UI::TestRunnerMediator.method_defined?(:run_suite_without_dtr_injection)\n assert Test::Unit::UI::TestRunnerMediator.private_method_defined?(:create_result_without_thread_safe)\n assert !Test::Unit::TestCase.respond_to?(:reject_dtr)\n assert !Test::Unit::TestCase.method_defined?(:run_without_dtr_injection)\n end",
"def run_tests()\n @loaded_modules.each do |module_name|\n module_class = Object.const_get(module_name.to_s).const_get(module_name.to_s).new\n if module_class.respond_to?( 'run' )\n module_class.run()\n else\n puts \"\\e[31mWARNING\\e[0m: Module #{(module_name)} not implemented\"\n end\n end\n end",
"def no_test\n flunk \"Test hasn't been written yet.\"\n end",
"def test_setup\r\n \r\n end",
"def beaker_tests\n ENV['TESTS'] || ENV['TEST'] || 'tests'\nend",
"def test_should_only_inject_test_runner_mediator_for_lauching_dtr\n DTR.inject\n assert Test::Unit::UI::TestRunnerMediator.respond_to?(:reject_dtr)\n assert Test::Unit::UI::TestRunnerMediator.method_defined?(:run_suite_without_dtr_injection)\n assert Test::Unit::UI::TestRunnerMediator.private_method_defined?(:create_result_without_thread_safe)\n end",
"def initialize_test\n self.multiruby_skip ||= []\n self.testlib ||= :testunit\n self.rspec_dirs ||= %w(spec lib)\n self.rspec_options ||= []\n end",
"def test_frameworks; end",
"def test_nonhero_is_vulnerable\n end",
"def default_test\r\n end"
] | [
"0.7830533",
"0.7767795",
"0.59468395",
"0.59468395",
"0.57822424",
"0.5688788",
"0.5685269",
"0.56059647",
"0.55875677",
"0.5562946",
"0.5555831",
"0.55513185",
"0.55316496",
"0.55299443",
"0.55291027",
"0.5526648",
"0.55130935",
"0.547739",
"0.5475869",
"0.5448887",
"0.5441296",
"0.54400754",
"0.54361194",
"0.5419593",
"0.54084045",
"0.54060894",
"0.54060894",
"0.5401677",
"0.53949976",
"0.5391064",
"0.53798443",
"0.5366753",
"0.53142154",
"0.5272845",
"0.5263667",
"0.5260618",
"0.5260618",
"0.52515113",
"0.52495915",
"0.52495915",
"0.5248353",
"0.5238148",
"0.52335835",
"0.5230537",
"0.5222543",
"0.52180797",
"0.5212025",
"0.5206913",
"0.5206913",
"0.5200305",
"0.51839775",
"0.51832074",
"0.5178352",
"0.5173532",
"0.5173532",
"0.51724863",
"0.5160711",
"0.51382726",
"0.5125348",
"0.5125348",
"0.5121168",
"0.5087528",
"0.50612164",
"0.5056586",
"0.50340647",
"0.50269395",
"0.49999657",
"0.49895984",
"0.49895984",
"0.49895984",
"0.49895984",
"0.49895984",
"0.49895984",
"0.49895984",
"0.49886778",
"0.49707702",
"0.49655873",
"0.49641186",
"0.49435335",
"0.49379814",
"0.49330443",
"0.49322966",
"0.49297878",
"0.49281386",
"0.49273628",
"0.4926373",
"0.49258575",
"0.49160445",
"0.49160072",
"0.49145824",
"0.49116042",
"0.49112868",
"0.49049193",
"0.49014977",
"0.4894673",
"0.48930356",
"0.48786497",
"0.48748174",
"0.48673654",
"0.48435912"
] | 0.7847089 | 0 |
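The uses_mocha guard documented in the row above only yields to its block when the mocha gem can be loaded, so Mocha-dependent tests are skipped instead of crashing the suite on machines without the gem. A minimal usage sketch, assuming Test::Unit and Mocha's stubs/returns API (the class and test names here are hypothetical):

    require 'test/unit'

    # Only defines the test case when the mocha gem is installed; otherwise the
    # guard prints a skip notice to stderr and never yields, so nothing runs.
    uses_mocha 'StubbedClock' do
      class StubbedClockTest < Test::Unit::TestCase
        def test_now_is_frozen
          Time.stubs(:now).returns(Time.at(0)) # Mocha's stubs/returns API
          assert_equal 0, Time.now.to_i
        end
      end
    end

The class definition sits inside the block on purpose: if the require fails, the block never runs and the test case is never defined.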
lexer rules lexer rule t__6! (T__6) (in CSV.g) | def t__6!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 1 )
type = T__6
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 7:8: '+'
match( 0x2b )
@state.type = type
@state.channel = channel
ensure
# -> uncomment the next line to manually enable rule tracing
# trace_out( __method__, 1 )
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def t__63!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n type = T__63\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 8:9: '\\\\n'\n match( 0xa )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n end",
"def t__26!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 19 )\n\n type = T__26\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 35:9: 'g'\n match( 0x67 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 19 )\n\n end",
"def t__65!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__65\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 10:9: ']'\n match( 0x5d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__71!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 16)\n\n type = T__71\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 16)\n\n end",
"def t__15!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__15\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:9: 'u'\n match( 0x75 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__16!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 7)\n\n type = T__16\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ','\n match(?,)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 7)\n\n end",
"def t__15!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 6)\n\n type = T__15\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 21:9: ')'\n match(?))\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 6)\n\n end",
"def t__67!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 12)\n\n type = T__67\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: 'lexer'\n match(\"lexer\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 12)\n\n end",
"def t__36!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 29 )\n\n type = T__36\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 45:9: 'T'\n match( 0x54 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 29 )\n\n end",
"def t__43!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n type = T__43\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 52:9: 'u'\n match( 0x75 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end",
"def t__12!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 3)\n\n type = T__12\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 3)\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 9:9: ')'\n match( 0x29 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def t__80!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 25)\n\n type = T__80\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 31:9: ','\n match(?,)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 25)\n\n end",
"def t__62!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n type = T__62\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 7:9: '\\\\r'\n match( 0xd )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end",
"def generalized_delimiter!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 110 )\n\n \n # - - - - main rule block - - - -\n # at line 991:3: ( NESTED_PARENTHESES | NESTED_BRACKETS | NESTED_BRACES | NESTED_ANGLES | ( '!' | '@' | '~' | '`' | '^' | '&' | '*' | '-' | '+' | '=' | '|' | ':' | ';' | '.' | ',' | '?' | '/' | '\\\"' | '\\\\'' ) )\n alt_65 = 5\n case look_65 = @input.peek( 1 )\n when 0x28 then alt_65 = 1\n when 0x5b then alt_65 = 2\n when 0x7b then alt_65 = 3\n when 0x3c then alt_65 = 4\n when 0x21, 0x22, 0x26, 0x27, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x3a, 0x3b, 0x3d, 0x3f, 0x40, 0x5e, 0x60, 0x7c, 0x7e then alt_65 = 5\n else\n raise NoViableAlternative( \"\", 65, 0 )\n end\n case alt_65\n when 1\n # at line 991:5: NESTED_PARENTHESES\n nested_parentheses!\n\n when 2\n # at line 992:5: NESTED_BRACKETS\n nested_brackets!\n\n when 3\n # at line 993:5: NESTED_BRACES\n nested_braces!\n\n when 4\n # at line 994:5: NESTED_ANGLES\n nested_angles!\n\n when 5\n # at line 995:5: ( '!' | '@' | '~' | '`' | '^' | '&' | '*' | '-' | '+' | '=' | '|' | ':' | ';' | '.' | ',' | '?' | '/' | '\\\"' | '\\\\'' )\n if @input.peek( 1 ).between?( 0x21, 0x22 ) || @input.peek( 1 ).between?( 0x26, 0x27 ) || @input.peek( 1 ).between?( 0x2a, 0x2f ) || @input.peek( 1 ).between?( 0x3a, 0x3b ) || @input.peek(1) == 0x3d || @input.peek( 1 ).between?( 0x3f, 0x40 ) || @input.peek(1) == 0x5e || @input.peek(1) == 0x60 || @input.peek(1) == 0x7c || @input.peek(1) == 0x7e\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # --> action\n scan_until_delimiter( @input.peek( -1 ) ) \n # <-- action\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 110 )\n\n end",
"def t__23!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = T__23\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 28:9: 'l'\n match( 0x6c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end",
"def h_6!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n\n type = H6\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 333:5: 'h6'\n match( \"h6\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n\n end",
"def t__35!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 24 )\n\n type = T__35\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 40:9: 'n'\n match( 0x6e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 24 )\n\n end",
"def t__14!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = T__14\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: 'U'\n match( 0x55 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 23 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 39:9: 'N'\n match( 0x4e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 23 )\n\n end",
"def num_f!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n\n\n type = NUM_F\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 47:8: ( '0' .. '9' )+ '.' ( '0' .. '9' )+\n # at file 47:8: ( '0' .. '9' )+\n match_count_5 = 0\n while true\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0.between?( 0x30, 0x39 ) )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n match( 0x2e )\n # at file 47:24: ( '0' .. '9' )+\n match_count_6 = 0\n while true\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0.between?( 0x30, 0x39 ) )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_6 > 0 and break\n eee = EarlyExit(6)\n\n\n raise eee\n end\n match_count_6 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n\n end",
"def t__26!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 15 )\n\n type = T__26\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 31:9: 'Q'\n match( 0x51 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 15 )\n\n end",
"def delim!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 13 )\n\n\n\n type = DELIM\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 305:3: '|'\n match( 0x7c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 13 )\n\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 151:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 43 )\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 368:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 43 )\n\n end",
"def t__11!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n type = T__11\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 7:9: '('\n match( 0x28 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end",
"def line_comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 78 )\n\n\n\n type = LINE_COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 608:8: '#' (~ ( '\\\\n' | '\\\\r' ) )* ( '\\\\r' )? '\\\\n'\n match( 0x23 )\n # at line 608:12: (~ ( '\\\\n' | '\\\\r' ) )*\n while true # decision 25\n alt_25 = 2\n look_25_0 = @input.peek( 1 )\n\n if ( look_25_0.between?( 0x0, 0x9 ) || look_25_0.between?( 0xb, 0xc ) || look_25_0.between?( 0xe, 0xffff ) )\n alt_25 = 1\n\n end\n case alt_25\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x0, 0x9 ) || @input.peek( 1 ).between?( 0xb, 0xc ) || @input.peek( 1 ).between?( 0xe, 0xffff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n break # out of loop for decision 25\n end\n end # loop for decision 25\n\n # at line 608:26: ( '\\\\r' )?\n alt_26 = 2\n look_26_0 = @input.peek( 1 )\n\n if ( look_26_0 == 0xd )\n alt_26 = 1\n end\n case alt_26\n when 1\n # at line 608:26: '\\\\r'\n match( 0xd )\n\n end\n match( 0xa )\n\n # --> action\n channel=HIDDEN;\n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 78 )\n\n\n end",
"def t__36!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 25 )\n\n type = T__36\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 41:9: 'H'\n match( 0x48 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 25 )\n\n end",
"def t__39!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 32 )\n\n type = T__39\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 48:9: 'o'\n match( 0x6f )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 32 )\n\n end",
"def t__89!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 34)\n\n type = T__89\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 40:9: '>'\n match(?>)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 34)\n\n end",
"def t__35!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 28 )\n\n type = T__35\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 44:9: 'c'\n match( 0x63 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 28 )\n\n end",
"def t__36!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 7 )\n\n\n\n type = T__36\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 13:9: '*'\n match( 0x2a )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 7 )\n\n\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 11:9: '('\n match( 0x28 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 5 )\n\n\n end",
"def t__35!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n\n\n type = T__35\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 12:9: ')'\n match( 0x29 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n\n end",
"def t__52!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n type = T__52\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 11:9: '}'\n match( 0x7d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 5 )\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: 'a'\n match( 0x61 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end",
"def t__18!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 9)\n\n type = T__18\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 24:9: '&'\n match(?&)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 9)\n\n end",
"def t__87!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 32)\n\n type = T__87\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 38:9: '~'\n match(?~)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 32)\n\n end",
"def token!\r\n # at line 1:8: ( T__6 | NUMBER | SPACE )\r\n alt_3 = 3\r\n case look_3 = @input.peek( 1 )\r\n when 0x2b then alt_3 = 1\r\n when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39 then alt_3 = 2\r\n when 0x20 then alt_3 = 3\r\n else\r\n raise NoViableAlternative( \"\", 3, 0 )\r\n end\r\n case alt_3\r\n when 1\r\n # at line 1:10: T__6\r\n t__6!\r\n\r\n when 2\r\n # at line 1:15: NUMBER\r\n number!\r\n\r\n when 3\r\n # at line 1:22: SPACE\r\n space!\r\n\r\n end\r\n end",
"def t__33!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n type = T__33\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 42:9: 'n'\n match( 0x6e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n end",
"def t__37!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n type = T__37\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 46:9: 't'\n match( 0x74 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n end",
"def t__83!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 28)\n\n type = T__83\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 34:9: ')'\n match(?))\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 28)\n\n end",
"def t__57!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 46 )\n\n type = T__57\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 62:9: 'g'\n match( 0x67 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 46 )\n\n end",
"def t__25!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 14 )\n\n type = T__25\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 30:9: 'i'\n match( 0x69 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 14 )\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 57:8: ','\n match( 0x2c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n\n end",
"def t__72!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 17)\n\n type = T__72\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 23:9: '}'\n match(?})\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 17)\n\n end",
"def t__78!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 23)\n\n type = T__78\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 29:9: ':'\n match(?:)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 23)\n\n end",
"def t__43!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 32 )\n\n type = T__43\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 48:9: 'd'\n match( 0x64 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 32 )\n\n end",
"def t__23!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n type = T__23\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 32:9: 'm'\n match( 0x6d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 27 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 43:9: 'C'\n match( 0x43 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 27 )\n\n end",
"def t__18!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n type = T__18\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 27:9: 'L'\n match( 0x4c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n end",
"def t__68!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 13)\n\n type = T__68\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: 'parser'\n match(\"parser\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 13)\n\n end",
"def t__74!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 19)\n\n type = T__74\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 25:9: '*'\n match(?*)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 19)\n\n end",
"def t__93!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 38)\n\n type = T__93\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 44:9: '$'\n match(?$)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 38)\n\n end",
"def t__9!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n type = T__9\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:8: 'h'\n match( 0x68 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n end",
"def t__22!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n type = T__22\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 27:9: 'L'\n match( 0x4c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n end",
"def t__33!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 22 )\n\n type = T__33\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 38:9: 'y'\n match( 0x79 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 22 )\n\n end",
"def t__11!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__11\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:9: 'e'\n match( 0x65 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 4)\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: '::='\n match(\"::=\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 4)\n\n end",
"def t__40!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n\n\n type = T__40\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 17:9: ','\n match( 0x2c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n\n end",
"def comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 15)\n\n type = COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 148:3: ( ( '#' | '//' ) (~ '\\\\n' )* | '/*' ( . )* '*/' )\n alt_10 = 2\n look_10_0 = @input.peek(1)\n\n if (look_10_0 == ?#) \n alt_10 = 1\n elsif (look_10_0 == ?/) \n look_10_2 = @input.peek(2)\n\n if (look_10_2 == ?/) \n alt_10 = 1\n elsif (look_10_2 == ?*) \n alt_10 = 2\n else\n nvae = NoViableAlternative(\"\", 10, 2)\n raise nvae\n end\n else\n nvae = NoViableAlternative(\"\", 10, 0)\n raise nvae\n end\n case alt_10\n when 1\n # at line 148:5: ( '#' | '//' ) (~ '\\\\n' )*\n # at line 148:5: ( '#' | '//' )\n alt_7 = 2\n look_7_0 = @input.peek(1)\n\n if (look_7_0 == ?#) \n alt_7 = 1\n elsif (look_7_0 == ?/) \n alt_7 = 2\n else\n nvae = NoViableAlternative(\"\", 7, 0)\n raise nvae\n end\n case alt_7\n when 1\n # at line 148:7: '#'\n match(?#)\n\n when 2\n # at line 148:13: '//'\n match(\"//\")\n\n end\n # at line 148:20: (~ '\\\\n' )*\n while true # decision 8\n alt_8 = 2\n look_8_0 = @input.peek(1)\n\n if (look_8_0.between?(0x0000, ?\\t) || look_8_0.between?(0x000B, 0xFFFF)) \n alt_8 = 1\n\n end\n case alt_8\n when 1\n # at line 148:20: ~ '\\\\n'\n if @input.peek(1).between?(0x0000, ?\\t) || @input.peek(1).between?(0x000B, 0x00FF)\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n else\n break # out of loop for decision 8\n end\n end # loop for decision 8\n\n when 2\n # at line 149:5: '/*' ( . )* '*/'\n match(\"/*\")\n # at line 149:10: ( . )*\n while true # decision 9\n alt_9 = 2\n look_9_0 = @input.peek(1)\n\n if (look_9_0 == ?*) \n look_9_1 = @input.peek(2)\n\n if (look_9_1 == ?/) \n alt_9 = 2\n elsif (look_9_1.between?(0x0000, ?.) || look_9_1.between?(?0, 0xFFFF)) \n alt_9 = 1\n\n end\n elsif (look_9_0.between?(0x0000, ?)) || look_9_0.between?(?+, 0xFFFF)) \n alt_9 = 1\n\n end\n case alt_9\n when 1\n # at line 149:10: .\n match_any\n\n else\n break # out of loop for decision 9\n end\n end # loop for decision 9\n match(\"*/\")\n\n end\n \n @state.type = type\n @state.channel = channel\n # --> action\n skip \n # <-- action\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 15)\n\n end",
"def t__56!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 45 )\n\n type = T__56\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 61:9: 'G'\n match( 0x47 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 45 )\n\n end",
"def t__14!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 5)\n\n type = T__14\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:9: '('\n match(?()\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 5)\n\n end",
"def t__31!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 24 )\n\n type = T__31\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 40:9: 'i'\n match( 0x69 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 24 )\n\n end",
"def t__90!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 35)\n\n type = T__90\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 41:9: '.'\n match(?.)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 35)\n\n end",
"def tokens!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 59)\n\n type = TOKENS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 596:4: 'tokens' WS_LOOP '{'\n match(\"tokens\")\n ws_loop!\n match(?{)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 59)\n\n end",
"def lt!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 27 )\n\n type = LT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 161:6: '<'\n match( 0x3c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 27 )\n\n end",
"def t__46!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 35 )\n\n type = T__46\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 51:9: 'C'\n match( 0x43 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 35 )\n\n end",
"def t__33!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n\n\n type = T__33\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 10:9: '&&'\n match( \"&&\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n\n end",
"def t__44!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 33 )\n\n type = T__44\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 49:9: 'V'\n match( 0x56 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 33 )\n\n end",
"def t__41!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n type = T__41\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 46:9: 'm'\n match( 0x6d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n end",
"def t__53!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 24 )\n\n\n\n type = T__53\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 30:9: '^'\n match( 0x5e )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 24 )\n\n\n end",
"def t__37!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n type = T__37\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 42:9: 'h'\n match( 0x68 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n end",
"def t__30!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n\n\n type = T__30\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 7:9: '!'\n match( 0x21 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n\n end",
"def ident!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 47 )\n\n\n\n type = IDENT\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 226:8: ( 'a' .. 'z' | 'A' .. 'Z' | '_' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' )*\n if @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n # at line 226:40: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' )*\n while true # decision 4\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x30, 0x39 ) || look_4_0.between?( 0x41, 0x5a ) || look_4_0 == 0x5f || look_4_0.between?( 0x61, 0x7a ) )\n alt_4 = 1\n\n end\n case alt_4\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n break # out of loop for decision 4\n end\n end # loop for decision 4\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 47 )\n\n\n end",
"def lt!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 55 )\n\n type = LT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 382:6: '<'\n match( 0x3c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 55 )\n\n end",
"def td!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 19 )\n\n type = TD\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 345:5: 'td'\n match( \"td\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 19 )\n\n end",
"def t__31!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 20 )\n\n type = T__31\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 36:9: 'f'\n match( 0x66 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 20 )\n\n end",
"def th!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 52 )\n\n type = TH\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n d = nil\n\n\n \n # - - - - main rule block - - - -\n # at line 351:5: d= ( ( DIGIT )* ) DIGIT ( 'T' | 't' ) ( 'H' | 'h' )\n # at line 351:7: ( ( DIGIT )* )\n # at line 351:8: ( DIGIT )*\n # at line 351:8: ( DIGIT )*\n while true # decision 4\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x30, 0x39 ) )\n look_4_1 = @input.peek( 2 )\n\n if ( look_4_1.between?( 0x30, 0x39 ) )\n alt_4 = 1\n\n end\n\n end\n case alt_4\n when 1\n # at line 351:8: DIGIT\n digit!\n\n else\n break # out of loop for decision 4\n end\n end # loop for decision 4\n\n digit!\n if @input.peek(1) == 0x54 || @input.peek(1) == 0x74\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n if @input.peek(1) == 0x48 || @input.peek(1) == 0x68\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 52 )\n\n end",
"def t__17!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n type = T__17\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: 't'\n match( 0x74 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end",
"def chunk_char!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 14 )\n\n \n # - - - - main rule block - - - -\n # at line 68:3: (~ ( '>' | '<' | '#' | '`' | '\\\"' | '\\\\'' | '|' | '(' | ')' | '$' | ';' | ' ' | '?' | '*' | '~' | '\\\\\\\\' | '\\\\t' | '{' | '}' | '\\\\n' | '\\\\r' ) | '\\\\\\\\' . )\n alt_23 = 2\n look_23_0 = @input.peek( 1 )\n\n if ( look_23_0.between?( 0x0, 0x8 ) || look_23_0.between?( 0xb, 0xc ) || look_23_0.between?( 0xe, 0x1f ) || look_23_0 == 0x21 || look_23_0.between?( 0x25, 0x26 ) || look_23_0.between?( 0x2b, 0x3a ) || look_23_0 == 0x3d || look_23_0.between?( 0x40, 0x5b ) || look_23_0.between?( 0x5d, 0x5f ) || look_23_0.between?( 0x61, 0x7a ) || look_23_0.between?( 0x7f, 0xffff ) )\n alt_23 = 1\n elsif ( look_23_0 == 0x5c )\n alt_23 = 2\n else\n raise NoViableAlternative( \"\", 23, 0 )\n end\n case alt_23\n when 1\n # at line 68:5: ~ ( '>' | '<' | '#' | '`' | '\\\"' | '\\\\'' | '|' | '(' | ')' | '$' | ';' | ' ' | '?' | '*' | '~' | '\\\\\\\\' | '\\\\t' | '{' | '}' | '\\\\n' | '\\\\r' )\n if @input.peek( 1 ).between?( 0x0, 0x8 ) || @input.peek( 1 ).between?( 0xb, 0xc ) || @input.peek( 1 ).between?( 0xe, 0x1f ) || @input.peek(1) == 0x21 || @input.peek( 1 ).between?( 0x25, 0x26 ) || @input.peek( 1 ).between?( 0x2b, 0x3a ) || @input.peek(1) == 0x3d || @input.peek( 1 ).between?( 0x40, 0x5b ) || @input.peek( 1 ).between?( 0x5d, 0x5f ) || @input.peek( 1 ).between?( 0x61, 0x7a ) || @input.peek( 1 ).between?( 0x7f, 0xff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 70:5: '\\\\\\\\' .\n match( 0x5c )\n match_any\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 14 )\n\n end",
"def t__54!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 43 )\n\n type = T__54\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 59:9: 'W'\n match( 0x57 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 43 )\n\n end",
"def less!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n type = LESS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 157:8: '<'\n match( 0x3c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end",
"def t__18!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 7 )\n\n type = T__18\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 23:9: 'O'\n match( 0x4f )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 7 )\n\n end",
"def t__31!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n\n\n type = T__31\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 8:9: '!='\n match( \"!=\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n\n end",
"def chunk!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 8 )\n\n type = CHUNK\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 47:3: ( ( '-' )+ ( CHUNK_CHAR )* | ( CHUNK_CHAR )+ )\n alt_16 = 2\n look_16_0 = @input.peek( 1 )\n\n if ( look_16_0 == 0x2d )\n alt_16 = 1\n elsif ( look_16_0.between?( 0x0, 0x8 ) || look_16_0.between?( 0xb, 0xc ) || look_16_0.between?( 0xe, 0x1f ) || look_16_0 == 0x21 || look_16_0.between?( 0x25, 0x26 ) || look_16_0.between?( 0x2b, 0x2c ) || look_16_0.between?( 0x2e, 0x3a ) || look_16_0 == 0x3d || look_16_0.between?( 0x40, 0x5f ) || look_16_0.between?( 0x61, 0x7a ) || look_16_0.between?( 0x7f, 0xffff ) )\n alt_16 = 2\n else\n raise NoViableAlternative( \"\", 16, 0 )\n end\n case alt_16\n when 1\n # at line 47:5: ( '-' )+ ( CHUNK_CHAR )*\n # at file 47:5: ( '-' )+\n match_count_13 = 0\n while true\n alt_13 = 2\n look_13_0 = @input.peek( 1 )\n\n if ( look_13_0 == 0x2d )\n alt_13 = 1\n\n end\n case alt_13\n when 1\n # at line 47:5: '-'\n match( 0x2d )\n\n else\n match_count_13 > 0 and break\n eee = EarlyExit(13)\n\n\n raise eee\n end\n match_count_13 += 1\n end\n\n # at line 47:10: ( CHUNK_CHAR )*\n while true # decision 14\n alt_14 = 2\n look_14_0 = @input.peek( 1 )\n\n if ( look_14_0.between?( 0x0, 0x8 ) || look_14_0.between?( 0xb, 0xc ) || look_14_0.between?( 0xe, 0x1f ) || look_14_0 == 0x21 || look_14_0.between?( 0x25, 0x26 ) || look_14_0.between?( 0x2b, 0x3a ) || look_14_0 == 0x3d || look_14_0.between?( 0x40, 0x5f ) || look_14_0.between?( 0x61, 0x7a ) || look_14_0.between?( 0x7f, 0xffff ) )\n alt_14 = 1\n\n end\n case alt_14\n when 1\n # at line 47:10: CHUNK_CHAR\n chunk_char!\n\n else\n break # out of loop for decision 14\n end\n end # loop for decision 14\n # --> action\n type = SWITCH \n # <-- action\n\n when 2\n # at line 48:5: ( CHUNK_CHAR )+\n # at file 48:5: ( CHUNK_CHAR )+\n match_count_15 = 0\n while true\n alt_15 = 2\n look_15_0 = @input.peek( 1 )\n\n if ( look_15_0.between?( 0x0, 0x8 ) || look_15_0.between?( 0xb, 0xc ) || look_15_0.between?( 0xe, 0x1f ) || look_15_0 == 0x21 || look_15_0.between?( 0x25, 0x26 ) || look_15_0.between?( 0x2b, 0x3a ) || look_15_0 == 0x3d || look_15_0.between?( 0x40, 0x5f ) || look_15_0.between?( 0x61, 0x7a ) || look_15_0.between?( 0x7f, 0xffff ) )\n alt_15 = 1\n\n end\n case alt_15\n when 1\n # at line 48:5: CHUNK_CHAR\n chunk_char!\n\n else\n match_count_15 > 0 and break\n eee = EarlyExit(15)\n\n\n raise eee\n end\n match_count_15 += 1\n end\n\n # --> action\n @cmd_start and type = COMMAND_NAME \n # <-- action\n\n end\n \n @state.type = type\n @state.channel = channel\n # --> action\n @cmd_start = false \n # <-- action\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 8 )\n\n end",
"def t__49!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 38 )\n\n type = T__49\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 54:9: 'a'\n match( 0x61 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 38 )\n\n end",
"def ivar!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 101 )\n\n type = IVAR\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 918:5: '@' ( '$' | '_' | 'a' .. 'z' | 'A' .. 'Z' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' )*\n match( 0x40 )\n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # at line 919:5: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' )*\n while true # decision 30\n alt_30 = 2\n look_30_0 = @input.peek( 1 )\n\n if ( look_30_0 == 0x24 || look_30_0.between?( 0x30, 0x39 ) || look_30_0.between?( 0x41, 0x5a ) || look_30_0 == 0x5f || look_30_0.between?( 0x61, 0x7a ) )\n alt_30 = 1\n\n end\n case alt_30\n when 1\n # at line \n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n break # out of loop for decision 30\n end\n end # loop for decision 30\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 101 )\n\n end",
"def tilde!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 56 )\n\n type = TILDE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 177:9: '~'\n match( 0x7e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 56 )\n\n end",
"def slash!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 96 )\n\n type = SLASH\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 867:3: ( '//' (~ ( '\\\\n' | '\\\\r' ) )* | '/*' ( . )* '*/' | {...}? => '/' (~ ( '/' | '*' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' ) | '\\\\\\\\' . | {...}? => INTERPOLATION | '#' ) (~ ( '/' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' ) | '\\\\\\\\' . | {...}? => INTERPOLATION | '#' )* '/' ( 'a' .. 'z' )* | {...}? => '/' ( '=' | ) )\n alt_12 = 4\n look_12_0 = @input.peek( 1 )\n\n if ( look_12_0 == 0x2f )\n look_12_1 = @input.peek( 2 )\n\n if ( look_12_1 == 0x2f )\n alt_12 = 1\n elsif ( look_12_1 == 0x2a )\n alt_12 = 2\n elsif ( look_12_1 == 0x3d ) and ( ( ( @value_expected ) ) or ( ( !@value_expected ) ) )\n look_12_4 = @input.peek( 3 )\n\n if ( look_12_4.between?( 0x0, 0x9 ) || look_12_4.between?( 0xb, 0xc ) || look_12_4.between?( 0xe, 0xffff ) ) and ( ( @value_expected ) )\n alt_12 = 3\n else\n alt_12 = 4\n end\n elsif ( look_12_1.between?( 0x0, 0x9 ) || look_12_1.between?( 0xb, 0xc ) || look_12_1.between?( 0xe, 0x29 ) || look_12_1.between?( 0x2b, 0x2e ) || look_12_1.between?( 0x30, 0x3c ) || look_12_1.between?( 0x3e, 0xffff ) ) and ( ( @value_expected ) )\n alt_12 = 3\n else\n alt_12 = 4\n end\n else\n raise NoViableAlternative( \"\", 12, 0 )\n end\n case alt_12\n when 1\n # at line 867:5: '//' (~ ( '\\\\n' | '\\\\r' ) )*\n match( \"//\" )\n # at line 867:10: (~ ( '\\\\n' | '\\\\r' ) )*\n while true # decision 6\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0.between?( 0x0, 0x9 ) || look_6_0.between?( 0xb, 0xc ) || look_6_0.between?( 0xe, 0xffff ) )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line 867:10: ~ ( '\\\\n' | '\\\\r' )\n if @input.peek( 1 ).between?( 0x0, 0x9 ) || @input.peek( 1 ).between?( 0xb, 0xc ) || @input.peek( 1 ).between?( 0xe, 0xff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n break # out of loop for decision 6\n end\n end # loop for decision 6\n # --> action\n type = LINE_COMMENT; channel = HIDDEN \n # <-- action\n\n when 2\n # at line 868:5: '/*' ( . )* '*/'\n match( \"/*\" )\n # at line 868:10: ( . )*\n while true # decision 7\n alt_7 = 2\n look_7_0 = @input.peek( 1 )\n\n if ( look_7_0 == 0x2a )\n look_7_1 = @input.peek( 2 )\n\n if ( look_7_1 == 0x2f )\n alt_7 = 2\n elsif ( look_7_1.between?( 0x0, 0x2e ) || look_7_1.between?( 0x30, 0xffff ) )\n alt_7 = 1\n\n end\n elsif ( look_7_0.between?( 0x0, 0x29 ) || look_7_0.between?( 0x2b, 0xffff ) )\n alt_7 = 1\n\n end\n case alt_7\n when 1\n # at line 868:10: .\n match_any\n\n else\n break # out of loop for decision 7\n end\n end # loop for decision 7\n match( \"*/\" )\n # --> action\n type = COMMENT; channel = HIDDEN \n # <-- action\n\n when 3\n # at line 869:5: {...}? => '/' (~ ( '/' | '*' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' ) | '\\\\\\\\' . | {...}? => INTERPOLATION | '#' ) (~ ( '/' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' ) | '\\\\\\\\' . | {...}? => INTERPOLATION | '#' )* '/' ( 'a' .. 'z' )*\n raise FailedPredicate( \"SLASH\", \" @value_expected \" ) unless ( ( @value_expected ) )\n match( 0x2f )\n # --> action\n type = REGEX \n # <-- action\n # at line 871:5: (~ ( '/' | '*' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' ) | '\\\\\\\\' . | {...}? 
=> INTERPOLATION | '#' )\n alt_8 = 4\n alt_8 = @dfa8.predict( @input )\n case alt_8\n when 1\n # at line 871:7: ~ ( '/' | '*' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' )\n if @input.peek( 1 ).between?( 0x0, 0x9 ) || @input.peek( 1 ).between?( 0xb, 0xc ) || @input.peek( 1 ).between?( 0xe, 0x22 ) || @input.peek( 1 ).between?( 0x24, 0x29 ) || @input.peek( 1 ).between?( 0x2b, 0x2e ) || @input.peek( 1 ).between?( 0x30, 0x5b ) || @input.peek( 1 ).between?( 0x5d, 0xff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 872:7: '\\\\\\\\' .\n match( 0x5c )\n match_any\n\n when 3\n # at line 873:7: {...}? => INTERPOLATION\n raise FailedPredicate( \"SLASH\", \" at_interpolation? \" ) unless ( ( at_interpolation? ) )\n interpolation!\n # --> action\n type = DREGEX \n # <-- action\n\n when 4\n # at line 874:7: '#'\n match( 0x23 )\n\n end\n # at line 876:5: (~ ( '/' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' ) | '\\\\\\\\' . | {...}? => INTERPOLATION | '#' )*\n while true # decision 9\n alt_9 = 5\n alt_9 = @dfa9.predict( @input )\n case alt_9\n when 1\n # at line 876:7: ~ ( '/' | '\\\\\\\\' | '\\\\r' | '\\\\n' | '#' )\n if @input.peek( 1 ).between?( 0x0, 0x9 ) || @input.peek( 1 ).between?( 0xb, 0xc ) || @input.peek( 1 ).between?( 0xe, 0x22 ) || @input.peek( 1 ).between?( 0x24, 0x2e ) || @input.peek( 1 ).between?( 0x30, 0x5b ) || @input.peek( 1 ).between?( 0x5d, 0xff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 877:7: '\\\\\\\\' .\n match( 0x5c )\n match_any\n\n when 3\n # at line 878:7: {...}? => INTERPOLATION\n raise FailedPredicate( \"SLASH\", \" at_interpolation? \" ) unless ( ( at_interpolation? ) )\n interpolation!\n # --> action\n type = DREGEX \n # <-- action\n\n when 4\n # at line 879:7: '#'\n match( 0x23 )\n\n else\n break # out of loop for decision 9\n end\n end # loop for decision 9\n match( 0x2f )\n # at line 882:5: ( 'a' .. 'z' )*\n while true # decision 10\n alt_10 = 2\n look_10_0 = @input.peek( 1 )\n\n if ( look_10_0.between?( 0x61, 0x7a ) )\n alt_10 = 1\n\n end\n case alt_10\n when 1\n # at line 882:7: 'a' .. 'z'\n match_range( 0x61, 0x7a )\n\n else\n break # out of loop for decision 10\n end\n end # loop for decision 10\n\n when 4\n # at line 883:5: {...}? => '/' ( '=' | )\n raise FailedPredicate( \"SLASH\", \" !@value_expected \" ) unless ( ( !@value_expected ) )\n match( 0x2f )\n # at line 883:33: ( '=' | )\n alt_11 = 2\n look_11_0 = @input.peek( 1 )\n\n if ( look_11_0 == 0x3d )\n alt_11 = 1\n else\n alt_11 = 2\n end\n case alt_11\n when 1\n # at line 883:35: '='\n match( 0x3d )\n # --> action\n type = SLASH_ASGN \n # <-- action\n\n when 2\n # at line 883:64: \n # --> action\n type = SLASH \n # <-- action\n\n end\n\n end\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 96 )\n\n end",
"def t__29!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 18 )\n\n type = T__29\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 34:9: 'e'\n match( 0x65 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 18 )\n\n end",
"def t__17!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n\n type = T__17\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 26:9: 'd'\n match( 0x64 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n\n end",
"def comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 49 )\n\n\n\n type = COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 228:11: '//' ( . )* ( '\\\\n' | '\\\\r' )\n match( \"//\" )\n\n # at line 228:16: ( . )*\n while true # decision 6\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0 == 0xa || look_6_0 == 0xd )\n alt_6 = 2\n elsif ( look_6_0.between?( 0x0, 0x9 ) || look_6_0.between?( 0xb, 0xc ) || look_6_0.between?( 0xe, 0xffff ) )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line 228:16: .\n match_any\n\n else\n break # out of loop for decision 6\n end\n end # loop for decision 6\n\n if @input.peek(1) == 0xa || @input.peek(1) == 0xd\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n # --> action\n channel = HIDDEN;\n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 49 )\n\n\n end",
"def t__38!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 31 )\n\n type = T__38\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 47:9: 'O'\n match( 0x4f )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 31 )\n\n end",
"def tilde!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 40 )\n\n type = TILDE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 175:9: '~'\n match( 0x7e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 40 )\n\n end",
"def comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 35 )\n\n type = COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 350:9: '#' (~ ( '\\\\n' | '\\\\r' ) )*\n match( 0x23 )\n # at line 350:13: (~ ( '\\\\n' | '\\\\r' ) )*\n while true # decision 13\n alt_13 = 2\n look_13_0 = @input.peek( 1 )\n\n if ( look_13_0.between?( 0x0, 0x9 ) || look_13_0.between?( 0xb, 0xc ) || look_13_0.between?( 0xe, 0xffff ) )\n alt_13 = 1\n\n end\n case alt_13\n when 1\n # at line 350:13: ~ ( '\\\\n' | '\\\\r' )\n if @input.peek( 1 ).between?( 0x0, 0x9 ) || @input.peek( 1 ).between?( 0xb, 0xc ) || @input.peek( 1 ).between?( 0xe, 0xff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n break # out of loop for decision 13\n end\n end # loop for decision 13\n # --> action\n channel=HIDDEN;\n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 35 )\n\n end",
"def t__86!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 31)\n\n type = T__86\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 37:9: '=>'\n match(\"=>\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 31)\n\n end",
"def id!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 102 )\n\n type = ID\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 923:5: ( '$' | '_' | 'a' .. 'z' | 'A' .. 'Z' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' )*\n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # at line 924:5: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' )*\n while true # decision 31\n alt_31 = 2\n look_31_0 = @input.peek( 1 )\n\n if ( look_31_0 == 0x24 || look_31_0.between?( 0x30, 0x39 ) || look_31_0.between?( 0x41, 0x5a ) || look_31_0 == 0x5f || look_31_0.between?( 0x61, 0x7a ) )\n alt_31 = 1\n\n end\n case alt_31\n when 1\n # at line \n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n break # out of loop for decision 31\n end\n end # loop for decision 31\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 102 )\n\n end",
"def whitespace!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 45 )\n\n\n\n type = WHITESPACE\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 66:14: ( '\\\\t' | '\\\\f' | '\\\\n' | '\\\\r' | ' ' | '\\\\u00A0' )+\n # at file 66:14: ( '\\\\t' | '\\\\f' | '\\\\n' | '\\\\r' | ' ' | '\\\\u00A0' )+\n match_count_9 = 0\n while true\n alt_9 = 2\n look_9_0 = @input.peek( 1 )\n\n if ( look_9_0.between?( 0x9, 0xa ) || look_9_0.between?( 0xc, 0xd ) || look_9_0 == 0x20 || look_9_0 == 0xa0 )\n alt_9 = 1\n\n end\n case alt_9\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x9, 0xa ) || @input.peek( 1 ).between?( 0xc, 0xd ) || @input.peek(1) == 0x20 || @input.peek(1) == 0xa0\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_9 > 0 and break\n eee = EarlyExit(9)\n\n\n raise eee\n end\n match_count_9 += 1\n end\n\n\n\n # --> action\n channel=HIDDEN; \n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 45 )\n\n\n end",
"def t__19!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = T__19\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 28:9: 'l'\n match( 0x6c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end"
] | [
"0.64735585",
"0.6448308",
"0.64443517",
"0.6433646",
"0.64273816",
"0.6420842",
"0.6401554",
"0.63948524",
"0.63883007",
"0.63377064",
"0.6299572",
"0.6292796",
"0.62752616",
"0.6272566",
"0.626987",
"0.62571025",
"0.6247509",
"0.62355816",
"0.6226294",
"0.6211529",
"0.62088495",
"0.6176853",
"0.61697763",
"0.6164848",
"0.61610866",
"0.61605126",
"0.61591715",
"0.61472607",
"0.6138087",
"0.61372375",
"0.61348253",
"0.6122679",
"0.6093958",
"0.6092894",
"0.6091792",
"0.60825217",
"0.6082277",
"0.6078052",
"0.6057415",
"0.60558593",
"0.6049947",
"0.6038027",
"0.60326236",
"0.602791",
"0.6024246",
"0.6022995",
"0.602123",
"0.601843",
"0.60171705",
"0.6009055",
"0.60088557",
"0.6005905",
"0.599595",
"0.59939015",
"0.5980525",
"0.59724104",
"0.5970231",
"0.5964161",
"0.5961106",
"0.59572273",
"0.59506834",
"0.5944014",
"0.59428304",
"0.59419906",
"0.5939411",
"0.5936604",
"0.5932825",
"0.59327054",
"0.5923926",
"0.59182435",
"0.59180367",
"0.5912093",
"0.590225",
"0.58977026",
"0.58960986",
"0.5886876",
"0.58829707",
"0.5873423",
"0.5868434",
"0.5852304",
"0.5852171",
"0.5846243",
"0.5846042",
"0.58394116",
"0.58389807",
"0.5823364",
"0.5820285",
"0.5808122",
"0.58033603",
"0.5800569",
"0.5799039",
"0.57974416",
"0.5796487",
"0.57964563",
"0.5794803",
"0.5794231",
"0.57933307",
"0.57911056",
"0.5786075",
"0.5785278"
] | 0.65833735 | 0 |
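The next row pairs the NUMBER rule of CSV.g with the Ruby method that the ANTLR3 Ruby target generates for it, and the rule methods collected as negatives in these rows follow the same generated pattern: each one matches its characters and records the result in @state.type and @state.channel, while a dispatcher such as token! peeks at the input and routes to the matching rule. A minimal sketch of driving such a generated lexer with the antlr3 runtime gem, assuming the usual CSV::Lexer class name, generated file name, and token accessors (none of which appear in the rows themselves):

require 'antlr3'
require_relative 'CSVLexer'         # file generated from CSV.g; the name is an assumption

lexer = CSV::Lexer.new( "12345" )   # <Grammar>::Lexer naming is the usual convention (assumed)
lexer.each do |token|
  # given the ( '0' .. '9' )+ NUMBER rule shown in the next row, the digits arrive as one NUMBER token
  puts "%s: %p" % [ token.name, token.text ]   # token.name / token.text are assumed accessor names
end

The sketch feeds digits only, so it exercises just the rule shown; any other input characters would be handled by whatever additional rules CSV.g defines, which are not part of this row.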
lexer rule number! (NUMBER) (in CSV.g) | def number!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 2 )
type = NUMBER
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 11:10: ( '0' .. '9' )+
# at file 11:10: ( '0' .. '9' )+
match_count_1 = 0
while true
alt_1 = 2
look_1_0 = @input.peek( 1 )
if ( look_1_0.between?( 0x30, 0x39 ) )
alt_1 = 1
end
case alt_1
when 1
# at line 11:11: '0' .. '9'
match_range( 0x30, 0x39 )
else
match_count_1 > 0 and break
eee = EarlyExit(1)
raise eee
end
match_count_1 += 1
end
@state.type = type
@state.channel = channel
ensure
# -> uncomment the next line to manually enable rule tracing
# trace_out( __method__, 2 )
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n type = NUM\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 37:9: ( '1' .. '9' ) ( '0' .. '9' )*\n # at line 37:9: ( '1' .. '9' )\n # at line 37:10: '1' .. '9'\n match_range( 0x31, 0x39 )\n\n # at line 37:19: ( '0' .. '9' )*\n while true # decision 1\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0.between?( 0x30, 0x39 ) )\n alt_1 = 1\n\n end\n case alt_1\n when 1\n # at line 37:20: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n break # out of loop for decision 1\n end\n end # loop for decision 1\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 5 )\n\n end",
"def num_i!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 25 )\n\n\n\n type = NUM_I\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 46:8: ( '0' .. '9' )+\n # at file 46:8: ( '0' .. '9' )+\n match_count_4 = 0\n while true\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x30, 0x39 ) )\n alt_4 = 1\n\n end\n case alt_4\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_4 > 0 and break\n eee = EarlyExit(4)\n\n\n raise eee\n end\n match_count_4 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 25 )\n\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 53 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 352:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 53 )\n\n end",
"def num_f!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n\n\n type = NUM_F\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 47:8: ( '0' .. '9' )+ '.' ( '0' .. '9' )+\n # at file 47:8: ( '0' .. '9' )+\n match_count_5 = 0\n while true\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0.between?( 0x30, 0x39 ) )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n match( 0x2e )\n # at file 47:24: ( '0' .. '9' )+\n match_count_6 = 0\n while true\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0.between?( 0x30, 0x39 ) )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_6 > 0 and break\n eee = EarlyExit(6)\n\n\n raise eee\n end\n match_count_6 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 37 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 136:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 37 )\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 47 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 184:12: ( '0' .. '9' )+\n # at file 184:12: ( '0' .. '9' )+\n match_count_3 = 0\n while true\n alt_3 = 2\n look_3_0 = @input.peek( 1 )\n\n if ( look_3_0.between?( 0x30, 0x39 ) )\n alt_3 = 1\n\n end\n case alt_3\n when 1\n # at line 184:13: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_3 > 0 and break\n eee = EarlyExit(3)\n\n\n raise eee\n end\n match_count_3 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 47 )\n\n end",
"def number_token\n return unless match = @chunk.match(NUMBER)\n number = match[0]\n lexed_length = number.size\n token(:NUMBER, number, 0, lexed_length)\n lexed_length\n end",
"def number!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 99 )\n\n type = NUMBER\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 908:3: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )? | ( '.' )? ( '0' .. '9' )+ ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )? | '0' ( 'x' | 'X' ) ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )+ )\n alt_28 = 3\n alt_28 = @dfa28.predict( @input )\n case alt_28\n when 1\n # at line 908:5: ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )?\n # at file 908:5: ( '0' .. '9' )+\n match_count_17 = 0\n while true\n alt_17 = 2\n look_17_0 = @input.peek( 1 )\n\n if ( look_17_0.between?( 0x30, 0x39 ) )\n alt_17 = 1\n\n end\n case alt_17\n when 1\n # at line 908:6: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_17 > 0 and break\n eee = EarlyExit(17)\n\n\n raise eee\n end\n match_count_17 += 1\n end\n\n match( 0x2e )\n # at line 908:21: ( '0' .. '9' )*\n while true # decision 18\n alt_18 = 2\n look_18_0 = @input.peek( 1 )\n\n if ( look_18_0.between?( 0x30, 0x39 ) )\n alt_18 = 1\n\n end\n case alt_18\n when 1\n # at line 908:22: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n break # out of loop for decision 18\n end\n end # loop for decision 18\n # at line 908:33: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )?\n alt_21 = 2\n look_21_0 = @input.peek( 1 )\n\n if ( look_21_0 == 0x45 || look_21_0 == 0x65 )\n alt_21 = 1\n end\n case alt_21\n when 1\n # at line 908:35: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+\n if @input.peek(1) == 0x45 || @input.peek(1) == 0x65\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # at line 908:47: ( '+' | '-' )?\n alt_19 = 2\n look_19_0 = @input.peek( 1 )\n\n if ( look_19_0 == 0x2b || look_19_0 == 0x2d )\n alt_19 = 1\n end\n case alt_19\n when 1\n # at line \n if @input.peek(1) == 0x2b || @input.peek(1) == 0x2d\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n end\n # at file 908:60: ( '0' .. '9' )+\n match_count_20 = 0\n while true\n alt_20 = 2\n look_20_0 = @input.peek( 1 )\n\n if ( look_20_0.between?( 0x30, 0x39 ) )\n alt_20 = 1\n\n end\n case alt_20\n when 1\n # at line 908:61: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_20 > 0 and break\n eee = EarlyExit(20)\n\n\n raise eee\n end\n match_count_20 += 1\n end\n\n\n end\n\n when 2\n # at line 909:5: ( '.' )? ( '0' .. '9' )+ ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )?\n # at line 909:5: ( '.' )?\n alt_22 = 2\n look_22_0 = @input.peek( 1 )\n\n if ( look_22_0 == 0x2e )\n alt_22 = 1\n end\n case alt_22\n when 1\n # at line 909:5: '.'\n match( 0x2e )\n\n end\n # at file 909:10: ( '0' .. '9' )+\n match_count_23 = 0\n while true\n alt_23 = 2\n look_23_0 = @input.peek( 1 )\n\n if ( look_23_0.between?( 0x30, 0x39 ) )\n alt_23 = 1\n\n end\n case alt_23\n when 1\n # at line 909:11: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_23 > 0 and break\n eee = EarlyExit(23)\n\n\n raise eee\n end\n match_count_23 += 1\n end\n\n # at line 909:22: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )?\n alt_26 = 2\n look_26_0 = @input.peek( 1 )\n\n if ( look_26_0 == 0x45 || look_26_0 == 0x65 )\n alt_26 = 1\n end\n case alt_26\n when 1\n # at line 909:24: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. 
'9' )+\n if @input.peek(1) == 0x45 || @input.peek(1) == 0x65\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # at line 909:36: ( '+' | '-' )?\n alt_24 = 2\n look_24_0 = @input.peek( 1 )\n\n if ( look_24_0 == 0x2b || look_24_0 == 0x2d )\n alt_24 = 1\n end\n case alt_24\n when 1\n # at line \n if @input.peek(1) == 0x2b || @input.peek(1) == 0x2d\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n end\n # at file 909:49: ( '0' .. '9' )+\n match_count_25 = 0\n while true\n alt_25 = 2\n look_25_0 = @input.peek( 1 )\n\n if ( look_25_0.between?( 0x30, 0x39 ) )\n alt_25 = 1\n\n end\n case alt_25\n when 1\n # at line 909:50: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_25 > 0 and break\n eee = EarlyExit(25)\n\n\n raise eee\n end\n match_count_25 += 1\n end\n\n\n end\n\n when 3\n # at line 910:5: '0' ( 'x' | 'X' ) ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )+\n match( 0x30 )\n if @input.peek(1) == 0x58 || @input.peek(1) == 0x78\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # at file 910:21: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )+\n match_count_27 = 0\n while true\n alt_27 = 2\n look_27_0 = @input.peek( 1 )\n\n if ( look_27_0.between?( 0x30, 0x39 ) || look_27_0.between?( 0x41, 0x46 ) || look_27_0.between?( 0x61, 0x66 ) )\n alt_27 = 1\n\n end\n case alt_27\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x46 ) || @input.peek( 1 ).between?( 0x61, 0x66 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_27 > 0 and break\n eee = EarlyExit(27)\n\n\n raise eee\n end\n match_count_27 += 1\n end\n\n\n end\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 99 )\n\n end",
"def k_num!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 47 )\n\n\n\n type = K_NUM\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 441:4: ( 'round' | 'aleatorio' | 'between' )\n # at line 441:4: ( 'round' | 'aleatorio' | 'between' )\n alt_14 = 3\n case look_14 = @input.peek( 1 )\n when 0x72 then alt_14 = 1\n when 0x61 then alt_14 = 2\n when 0x62 then alt_14 = 3\n else\n raise NoViableAlternative( \"\", 14, 0 )\n\n end\n case alt_14\n when 1\n # at line 441:5: 'round'\n match( \"round\" )\n\n\n when 2\n # at line 441:13: 'aleatorio'\n match( \"aleatorio\" )\n\n\n when 3\n # at line 441:25: 'between'\n match( \"between\" )\n\n\n end\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 47 )\n\n\n end",
"def integer!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n\n\n type = INTEGER\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 191:10: ( '0' .. '9' )+\n # at file 191:10: ( '0' .. '9' )+\n match_count_1 = 0\n while true\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0.between?( 0x30, 0x39 ) )\n alt_1 = 1\n\n end\n case alt_1\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_1 > 0 and break\n eee = EarlyExit(1)\n\n\n raise eee\n end\n match_count_1 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n\n end",
"def int!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 48)\n\n type = INT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 526:7: ( '0' .. '9' )+\n # at file 526:7: ( '0' .. '9' )+\n match_count_10 = 0\n loop do\n alt_10 = 2\n look_10_0 = @input.peek(1)\n\n if (look_10_0.between?(?0, ?9)) \n alt_10 = 1\n\n end\n case alt_10\n when 1\n # at line 526:7: '0' .. '9'\n match_range(?0, ?9)\n\n else\n match_count_10 > 0 and break\n eee = EarlyExit(10)\n\n\n raise eee\n end\n match_count_10 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 48)\n\n end",
"def number_token\n return nil unless md = NUMBER.match(@chunk)\n number = md.to_a[0]\n token :Number, number\n number.length\n end",
"def num\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 55 )\n return_value = NumReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n num_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n # at line 345:22: ( DIGIT )+\n # at file 345:22: ( DIGIT )+\n match_count_39 = 0\n while true\n alt_39 = 2\n look_39_0 = @input.peek( 1 )\n\n if ( look_39_0 == DIGIT )\n alt_39 = 1\n\n end\n case alt_39\n when 1\n # at line 0:0: DIGIT\n match( DIGIT, TOKENS_FOLLOWING_DIGIT_IN_num_2214 )\n\n else\n match_count_39 > 0 and break\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n eee = EarlyExit(39)\n\n\n raise eee\n end\n match_count_39 += 1\n end\n\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n return_value.value = @input.to_s( return_value.start, @input.look( -1 ) ).to_i\n # <-- action\n end\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 55 )\n memoize( __method__, num_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end",
"def number\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n value = nil\n __DEC_NUMBER1__ = nil\n __HEX_NUMBER2__ = nil\n\n begin\n # at line 22:2: ( DEC_NUMBER | HEX_NUMBER )\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0 == DEC_NUMBER )\n alt_1 = 1\n elsif ( look_1_0 == HEX_NUMBER )\n alt_1 = 2\n else\n raise NoViableAlternative( \"\", 1, 0 )\n end\n case alt_1\n when 1\n # at line 22:4: DEC_NUMBER\n __DEC_NUMBER1__ = match( DEC_NUMBER, TOKENS_FOLLOWING_DEC_NUMBER_IN_number_180 )\n # --> action\n value = __DEC_NUMBER1__.text.to_i \n # <-- action\n\n when 2\n # at line 23:4: HEX_NUMBER\n __HEX_NUMBER2__ = match( HEX_NUMBER, TOKENS_FOLLOWING_HEX_NUMBER_IN_number_187 )\n # --> action\n value = __HEX_NUMBER2__.text[2..-1].to_i(16) \n # <-- action\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end\n \n return value\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 46 )\n\n \n # - - - - main rule block - - - -\n # at line 392:10: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 46 )\n\n end",
"def number\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 29 )\n return_value = NumberReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n begin\n # at line 126:12: ( digits ( DOT digits )? )\n # at line 126:12: ( digits ( DOT digits )? )\n # at line 126:14: digits ( DOT digits )?\n @state.following.push( TOKENS_FOLLOWING_digits_IN_number_1255 )\n digits\n @state.following.pop\n # at line 126:21: ( DOT digits )?\n alt_44 = 2\n look_44_0 = @input.peek( 1 )\n\n if ( look_44_0 == DOT )\n alt_44 = 1\n end\n case alt_44\n when 1\n # at line 126:23: DOT digits\n match( DOT, TOKENS_FOLLOWING_DOT_IN_number_1259 )\n @state.following.push( TOKENS_FOLLOWING_digits_IN_number_1261 )\n digits\n @state.following.pop\n\n end\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 29 )\n\n end\n \n return return_value\n end",
"def number\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 32 )\n return_value = NumberReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n begin\n # at line 154:12: ( ( DIGIT )* ( '.' ( DIGIT )+ )? )\n # at line 154:12: ( ( DIGIT )* ( '.' ( DIGIT )+ )? )\n # at line 154:14: ( DIGIT )* ( '.' ( DIGIT )+ )?\n # at line 154:14: ( DIGIT )*\n while true # decision 40\n alt_40 = 2\n look_40_0 = @input.peek(1)\n\n if (look_40_0 == DIGIT)\n alt_40 = 1\n\n end\n case alt_40\n when 1\n # at line 154:14: DIGIT\n match(DIGIT, TOKENS_FOLLOWING_DIGIT_IN_number_1205)\n\n else\n break # out of loop for decision 40\n end\n end # loop for decision 40\n # at line 154:21: ( '.' ( DIGIT )+ )?\n alt_42 = 2\n look_42_0 = @input.peek(1)\n\n if (look_42_0 == T__33)\n alt_42 = 1\n end\n case alt_42\n when 1\n # at line 154:23: '.' ( DIGIT )+\n match(T__33, TOKENS_FOLLOWING_T__33_IN_number_1210)\n # at file 154:27: ( DIGIT )+\n match_count_41 = 0\n while true\n alt_41 = 2\n look_41_0 = @input.peek(1)\n\n if (look_41_0 == DIGIT)\n alt_41 = 1\n\n end\n case alt_41\n when 1\n # at line 154:27: DIGIT\n match(DIGIT, TOKENS_FOLLOWING_DIGIT_IN_number_1212)\n\n else\n match_count_41 > 0 and break\n eee = EarlyExit(41)\n\n\n raise eee\n end\n match_count_41 += 1\n end\n\n\n end\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 32 )\n\n end\n\n return return_value\n end",
"def int!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 33 )\n\n type = INT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 339:7: ( '0' .. '9' )+\n # at file 339:7: ( '0' .. '9' )+\n match_count_5 = 0\n while true\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0.between?( 0x30, 0x39 ) )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line 339:7: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 33 )\n\n end",
"def t__16!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 7)\n\n type = T__16\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ','\n match(?,)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 7)\n\n end",
"def xdigit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 47)\n\n \n # - - - - main rule block - - - -\n # at line \n if @input.peek(1).between?(?0, ?9) || @input.peek(1).between?(?A, ?F) || @input.peek(1).between?(?a, ?f)\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 47)\n\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 23 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 39:9: 'N'\n match( 0x4e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 23 )\n\n end",
"def t__11!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n type = T__11\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 7:9: '('\n match( 0x28 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end",
"def t__63!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n type = T__63\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 8:9: '\\\\n'\n match( 0xa )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n end",
"def tokenize_number(&block) # :yields: SQLTree::Token::Number\n number = current_char\n dot_encountered = false\n while /\\d/ =~ peek_char || (peek_char == '.' && !dot_encountered)\n dot_encountered = true if peek_char == '.'\n number << next_char\n end\n\n if dot_encountered\n handle_token(SQLTree::Token::Number.new(number.to_f), &block)\n else\n handle_token(SQLTree::Token::Number.new(number.to_i), &block)\n end\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 151:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n end",
"def t__33!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n type = T__33\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 42:9: 'n'\n match( 0x6e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 43 )\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 368:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 43 )\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 9:9: ')'\n match( 0x29 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def parse_number\n self.lex_state = :expr_end\n\n case\n when src.scan(/[+-]?0[xXbBdD]\\b/) then\n rb_compile_error \"Invalid numeric format\"\n when src.scan(/[+-]?(?:(?:[1-9][\\d_]*|0)(?!\\.\\d)\\b|0[Dd][0-9_]+)/) then\n int_with_base(10)\n when src.scan(/[+-]?0x[a-f0-9_]+/i) then\n int_with_base(16)\n when src.scan(/[+-]?0[Bb][01_]+/) then\n int_with_base(2)\n when src.scan(/[+-]?0[Oo]?[0-7_]*[89]/) then\n rb_compile_error \"Illegal octal digit.\"\n when src.scan(/[+-]?0[Oo]?[0-7_]+|0[Oo]/) then\n int_with_base(8)\n when src.scan(/[+-]?[\\d_]+_(e|\\.)/) then\n rb_compile_error \"Trailing '_' in number.\"\n when src.scan(/[+-]?[\\d_]+\\.[\\d_]+(e[+-]?[\\d_]+)?\\b|[+-]?[\\d_]+e[+-]?[\\d_]+\\b/i) then\n number = src.matched\n if number =~ /__/ then\n rb_compile_error \"Invalid numeric format\"\n end\n self.yacc_value = number.to_f\n :tFLOAT\n when src.scan(/[+-]?[0-9_]+(?![e])/) then\n int_with_base(10)\n else\n rb_compile_error \"Bad number format\"\n end\n end",
"def token!\r\n # at line 1:8: ( T__6 | NUMBER | SPACE )\r\n alt_3 = 3\r\n case look_3 = @input.peek( 1 )\r\n when 0x2b then alt_3 = 1\r\n when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39 then alt_3 = 2\r\n when 0x20 then alt_3 = 3\r\n else\r\n raise NoViableAlternative( \"\", 3, 0 )\r\n end\r\n case alt_3\r\n when 1\r\n # at line 1:10: T__6\r\n t__6!\r\n\r\n when 2\r\n # at line 1:15: NUMBER\r\n number!\r\n\r\n when 3\r\n # at line 1:22: SPACE\r\n space!\r\n\r\n end\r\n end",
"def t__31!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 24 )\n\n type = T__31\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 40:9: 'i'\n match( 0x69 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 24 )\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 57:8: ','\n match( 0x2c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n\n end",
"def read_number(token)\n current = @marker.character\n is_float = current == ?.\n is_exponent = false\n token.kind = is_float ? :float_lit : :integer_lit\n\n while (current = peek_next())\n case current\n # Float lit\n when ?.\n break if is_float == true\n is_float = true\n token.kind = :float_lit\n read_next()\n\n # Digit\n when ?0, ?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9\n read_next()\n\n # Exponent\n when ?e, ?E\n if is_exponent\n token.kind = :invalid\n raise_error(:duplicate_exponent,\n \"Malformed number literal: exponent already provided\")\n end\n\n is_exponent = true\n token.kind = is_float ? :float_exp_lit : :integer_exp_lit\n\n read_next()\n current = read_next()\n current = read_next() if current == ?- || current == ?+\n\n if current < ?0 || current > ?9\n raise_error(:malformed_exponent, \"Malformed number literal: exponent expected but not provided\")\n end\n\n else break\n end\n end\n\n token.value = @source[(token.from .. @marker.source_index)]\n end",
"def initializer_expression()\n\n\n\n # 115:7: NUMBER\n match(:NUMBER)\n\n\n\n end",
"def t__35!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 24 )\n\n type = T__35\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 40:9: 'n'\n match( 0x6e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 24 )\n\n end",
"def t__80!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 25)\n\n type = T__80\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 31:9: ','\n match(?,)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 25)\n\n end",
"def t__15!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 6)\n\n type = T__15\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 21:9: ')'\n match(?))\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 6)\n\n end",
"def number_delimiter number\n\t\tnumber_with_delimiter(number, delimiter: \",\")\n\tend",
"def t__32!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 25 )\n\n type = T__32\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 41:9: 'N'\n match( 0x4e )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 25 )\n\n end",
"def lex_anly(check)\n case check\n when '('\n return \"LPAREN\", '('\n when ')'\n return \"RPAREN\", ')'\n when \"0\"..\"9\"\n number = \"\"\n #displacement variable\n dis = 0\n if /[1-9]/.match(check)\n number << check\n check = @tokens[@pos]\n while /[0-9]/.match(check)\n number << check\n dis += 1\n check = @tokens[@pos + dis]\n end\n if check == \".\"\n number << check\n dis += 1\n check = @tokens[@pos+dis]\n else\n @pos += dis\n return \"NUMBER\", number\n end\n while /[0-9]/.match(check)\n number << check\n dis+=1\n check = @tokens[@pos+dis]\n end\n @pos += dis\n return \"NUMBER\", number\n end\n return \"NUMBER\", number\n when '/'\n return \"DIV\", '/'\n when '+'\n return \"ADD\", '+'\n when \"*\"\n return \"MUL\", \"*\"\n when \"-\"\n return \"MIN\", \"-\"\n else\n print(\"lexical error \")\n print(check)\n print(\" is not a valid token\")\n return \"INVALID\", check\n end\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 11:9: '('\n match( 0x28 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 5 )\n\n\n end",
"def integer!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 13 )\n\n\n\n type = INTEGER\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 34:10: 'nocortable'\n match( \"nocortable\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 13 )\n\n\n end",
"def digit \n\t\n\t$cst.add_branch(\"digit\")\n\t\n\tmatch_token(\"T_DIGIT\", $tokens[$index])\n\t\n\t$cst.ascend\n\t\nend",
"def t__12!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 3)\n\n type = T__12\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 3)\n\n end",
"def consume_numeric\n number = consume_number\n\n if start_identifier?\n create_token(:dimension,\n :repr => number[0],\n :type => number[2],\n :unit => consume_name,\n :value => number[1])\n\n elsif @s.peek == '%'\n @s.consume\n\n create_token(:percentage,\n :repr => number[0],\n :type => number[2],\n :value => number[1])\n\n else\n create_token(:number,\n :repr => number[0],\n :type => number[2],\n :value => number[1])\n end\n end",
"def decimal_literal!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 69 )\n\n\n\n type = DecimalLiteral\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 525:18: ( '0' | '1' .. '9' ( '0' .. '9' )* )\n # at line 525:18: ( '0' | '1' .. '9' ( '0' .. '9' )* )\n alt_21 = 2\n look_21_0 = @input.peek( 1 )\n\n if ( look_21_0 == 0x30 )\n alt_21 = 1\n elsif ( look_21_0.between?( 0x31, 0x39 ) )\n alt_21 = 2\n else\n raise NoViableAlternative( \"\", 21, 0 )\n\n end\n case alt_21\n when 1\n # at line 525:19: '0'\n match( 0x30 )\n\n when 2\n # at line 525:25: '1' .. '9' ( '0' .. '9' )*\n match_range( 0x31, 0x39 )\n # at line 525:34: ( '0' .. '9' )*\n while true # decision 20\n alt_20 = 2\n look_20_0 = @input.peek( 1 )\n\n if ( look_20_0.between?( 0x30, 0x39 ) )\n alt_20 = 1\n\n end\n case alt_20\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n break # out of loop for decision 20\n end\n end # loop for decision 20\n\n\n end\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 69 )\n\n\n end",
"def write_num_lit(data)\n write_num_base('c:numLit', data)\n end",
"def accion_num\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 34 )\n\n\n return_value = AccionNumReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n\n root_0 = nil\n\n __K_NUM168__ = nil\n __LPAR169__ = nil\n __Identificador170__ = nil\n __COMA172__ = nil\n __RPAR174__ = nil\n __EOL175__ = nil\n var_local171 = nil\n valor173 = nil\n\n\n tree_for_K_NUM168 = nil\n tree_for_LPAR169 = nil\n tree_for_Identificador170 = nil\n tree_for_COMA172 = nil\n tree_for_RPAR174 = nil\n tree_for_EOL175 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 168:4: K_NUM LPAR ( Identificador | var_local ) COMA valor RPAR EOL\n __K_NUM168__ = match( K_NUM, TOKENS_FOLLOWING_K_NUM_IN_accion_num_768 )\n if @state.backtracking == 0\n tree_for_K_NUM168 = @adaptor.create_with_payload( __K_NUM168__ )\n @adaptor.add_child( root_0, tree_for_K_NUM168 )\n\n end\n\n __LPAR169__ = match( LPAR, TOKENS_FOLLOWING_LPAR_IN_accion_num_770 )\n if @state.backtracking == 0\n tree_for_LPAR169 = @adaptor.create_with_payload( __LPAR169__ )\n @adaptor.add_child( root_0, tree_for_LPAR169 )\n\n end\n\n # at line 168:15: ( Identificador | var_local )\n alt_23 = 2\n look_23_0 = @input.peek( 1 )\n\n if ( look_23_0 == Identificador )\n alt_23 = 1\n elsif ( look_23_0 == DOUBLEDOT )\n alt_23 = 2\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n\n\n raise NoViableAlternative( \"\", 23, 0 )\n\n end\n case alt_23\n when 1\n # at line 168:16: Identificador\n __Identificador170__ = match( Identificador, TOKENS_FOLLOWING_Identificador_IN_accion_num_773 )\n if @state.backtracking == 0\n tree_for_Identificador170 = @adaptor.create_with_payload( __Identificador170__ )\n @adaptor.add_child( root_0, tree_for_Identificador170 )\n\n end\n\n\n when 2\n # at line 168:30: var_local\n @state.following.push( TOKENS_FOLLOWING_var_local_IN_accion_num_775 )\n var_local171 = var_local\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, var_local171.tree )\n end\n\n\n end\n __COMA172__ = match( COMA, TOKENS_FOLLOWING_COMA_IN_accion_num_778 )\n if @state.backtracking == 0\n tree_for_COMA172 = @adaptor.create_with_payload( __COMA172__ )\n @adaptor.add_child( root_0, tree_for_COMA172 )\n\n end\n\n @state.following.push( TOKENS_FOLLOWING_valor_IN_accion_num_780 )\n valor173 = valor\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, valor173.tree )\n end\n\n __RPAR174__ = match( RPAR, TOKENS_FOLLOWING_RPAR_IN_accion_num_782 )\n if @state.backtracking == 0\n tree_for_RPAR174 = @adaptor.create_with_payload( __RPAR174__ )\n @adaptor.add_child( root_0, tree_for_RPAR174 )\n\n end\n\n __EOL175__ = match( EOL, TOKENS_FOLLOWING_EOL_IN_accion_num_784 )\n if @state.backtracking == 0\n tree_for_EOL175 = @adaptor.create_with_payload( __EOL175__ )\n @adaptor.add_child( root_0, tree_for_EOL175 )\n\n end\n\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n if @state.backtracking == 0\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 34 
)\n\n\n end\n\n return return_value\n end",
"def process_lit(exp)\n exp.shift\n value = exp.shift\n\n if value.is_a?(Numeric) && !@ignore.include?(value)\n @file.magic_numbers << MagicNumber.new(:value => value, :line => exp.line)\n end\n\n s()\n end",
"def decimal!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 5)\n\n type = DECIMAL\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 25:5: ( '-' )? '1' .. '9' ( '0' .. '9' )*\n # at line 25:5: ( '-' )?\n alt_9 = 2\n look_9_0 = @input.peek(1)\n\n if (look_9_0 == ?-) \n alt_9 = 1\n end\n case alt_9\n when 1\n # at line 25:5: '-'\n match(?-)\n\n end\n match_range(?1, ?9)\n # at line 25:20: ( '0' .. '9' )*\n loop do #loop 10\n alt_10 = 2\n look_10_0 = @input.peek(1)\n\n if (look_10_0.between?(?0, ?9)) \n alt_10 = 1\n\n end\n case alt_10\n when 1\n # at line 25:21: '0' .. '9'\n match_range(?0, ?9)\n\n else\n break #loop 10\n end\n end\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 5)\n\n end",
"def t__23!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = T__23\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 28:9: 'l'\n match( 0x6c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end",
"def t__71!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 16)\n\n type = T__71\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 16)\n\n end",
"def t__22!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 15 )\n\n type = T__22\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 31:9: 'M'\n match( 0x4d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 15 )\n\n end",
"def t__22!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n type = T__22\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 27:9: 'L'\n match( 0x4c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 27 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 43:9: 'C'\n match( 0x43 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 27 )\n\n end",
"def parseNumber _args\n \"parseNumber _args;\" \n end",
"def t__33!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 22 )\n\n type = T__33\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 38:9: 'y'\n match( 0x79 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 22 )\n\n end",
"def t__25!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 14 )\n\n type = T__25\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 30:9: 'i'\n match( 0x69 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 14 )\n\n end",
"def t__40!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n\n\n type = T__40\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 17:9: ','\n match( 0x2c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n\n end",
"def id!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 102 )\n\n type = ID\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 923:5: ( '$' | '_' | 'a' .. 'z' | 'A' .. 'Z' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' )*\n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # at line 924:5: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' )*\n while true # decision 31\n alt_31 = 2\n look_31_0 = @input.peek( 1 )\n\n if ( look_31_0 == 0x24 || look_31_0.between?( 0x30, 0x39 ) || look_31_0.between?( 0x41, 0x5a ) || look_31_0 == 0x5f || look_31_0.between?( 0x61, 0x7a ) )\n alt_31 = 1\n\n end\n case alt_31\n when 1\n # at line \n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n break # out of loop for decision 31\n end\n end # loop for decision 31\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 102 )\n\n end",
"def t__35!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n\n\n type = T__35\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 12:9: ')'\n match( 0x29 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n\n end",
"def t__42!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 35 )\n\n type = T__42\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 51:9: 'U'\n match( 0x55 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 35 )\n\n end",
"def t__90!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 35)\n\n type = T__90\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 41:9: '.'\n match(?.)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 35)\n\n end",
"def t__23!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n type = T__23\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 32:9: 'm'\n match( 0x6d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end",
"def t__15!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__15\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:9: 'u'\n match( 0x75 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__83!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 28)\n\n type = T__83\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 34:9: ')'\n match(?))\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 28)\n\n end",
"def t__11!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__11\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:9: 'e'\n match( 0x65 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__89!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 34)\n\n type = T__89\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 40:9: '>'\n match(?>)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 34)\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: 'a'\n match( 0x61 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end",
"def identificador!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 79 )\n\n\n\n type = Identificador\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 616:6: Letter ( Letter | Digito )*\n letter!\n\n # at line 616:13: ( Letter | Digito )*\n while true # decision 27\n alt_27 = 2\n look_27_0 = @input.peek( 1 )\n\n if ( look_27_0 == 0x24 || look_27_0.between?( 0x30, 0x39 ) || look_27_0.between?( 0x41, 0x5a ) || look_27_0 == 0x5f || look_27_0.between?( 0x61, 0x7a ) || look_27_0.between?( 0xc0, 0xd6 ) || look_27_0.between?( 0xd8, 0xf6 ) || look_27_0.between?( 0xf8, 0x1fff ) || look_27_0.between?( 0x3040, 0x318f ) || look_27_0.between?( 0x3300, 0x337f ) || look_27_0.between?( 0x3400, 0x3d2d ) || look_27_0.between?( 0x4e00, 0x9fff ) || look_27_0.between?( 0xf900, 0xfaff ) )\n alt_27 = 1\n\n end\n case alt_27\n when 1\n # at line \n if @input.peek(1) == 0x24 || @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x5a ) || @input.peek(1) == 0x5f || @input.peek( 1 ).between?( 0x61, 0x7a ) || @input.peek( 1 ).between?( 0xc0, 0xd6 ) || @input.peek( 1 ).between?( 0xd8, 0xf6 ) || @input.peek( 1 ).between?( 0xf8, 0x1fff ) || @input.peek( 1 ).between?( 0x3040, 0x318f ) || @input.peek( 1 ).between?( 0x3300, 0x337f ) || @input.peek( 1 ).between?( 0x3400, 0x3d2d ) || @input.peek( 1 ).between?( 0x4e00, 0x9fff ) || @input.peek( 1 ).between?( 0xf900, 0xfaff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n break # out of loop for decision 27\n end\n end # loop for decision 27\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 79 )\n\n\n end",
"def t__62!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n type = T__62\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 7:9: '\\\\r'\n match( 0xd )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end",
"def t__43!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n type = T__43\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 52:9: 'u'\n match( 0x75 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end",
"def t__67!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 12)\n\n type = T__67\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: 'lexer'\n match(\"lexer\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 12)\n\n end",
"def t__32!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 21 )\n\n type = T__32\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 37:9: 'Y'\n match( 0x59 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 21 )\n\n end",
"def verse_re\n NUMBER_RE\n end",
"def t__14!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__14\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 10:9: '-'\n match( 0x2d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 4)\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: '::='\n match(\"::=\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 4)\n\n end",
"def t__93!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 38)\n\n type = T__93\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 44:9: '$'\n match(?$)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 38)\n\n end",
"def t__39!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 32 )\n\n type = T__39\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 48:9: 'o'\n match( 0x6f )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 32 )\n\n end",
"def t__52!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n type = T__52\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 11:9: '}'\n match( 0x7d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 5 )\n\n end",
"def t__58!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 47 )\n\n type = T__58\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 63:9: 'X'\n match( 0x58 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 47 )\n\n end",
"def t__41!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n type = T__41\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 46:9: 'm'\n match( 0x6d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n end",
"def t__31!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 20 )\n\n type = T__31\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 36:9: 'f'\n match( 0x66 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 20 )\n\n end",
"def t__10!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = T__10\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: 'E'\n match( 0x45 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def t__40!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 29 )\n\n type = T__40\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 45:9: 'M'\n match( 0x4d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 29 )\n\n end",
"def commas(num)\nend",
"def t__35!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 28 )\n\n type = T__35\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 44:9: 'c'\n match( 0x63 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 28 )\n\n end",
"def to_int!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n\n type = TO_INT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 312:10: 'int:'\n match( \"int:\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n\n end",
"def t__32!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n\n\n type = T__32\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 9:9: '%'\n match( 0x25 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n\n end",
"def t__42!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 13 )\n\n\n\n type = T__42\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 19:9: '-='\n match( \"-=\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 13 )\n\n\n end",
"def redirect!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = REDIRECT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 59:5: ( '0' .. '9' )* ( '>>' | '>' | '<<' | '<' ) ( '&' ( '0' .. '9' )+ )?\n # at line 59:5: ( '0' .. '9' )*\n while true # decision 18\n alt_18 = 2\n look_18_0 = @input.peek( 1 )\n\n if ( look_18_0.between?( 0x30, 0x39 ) )\n alt_18 = 1\n\n end\n case alt_18\n when 1\n # at line 59:6: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n break # out of loop for decision 18\n end\n end # loop for decision 18\n # at line 59:17: ( '>>' | '>' | '<<' | '<' )\n alt_19 = 4\n look_19_0 = @input.peek( 1 )\n\n if ( look_19_0 == 0x3e )\n look_19_1 = @input.peek( 2 )\n\n if ( look_19_1 == 0x3e )\n alt_19 = 1\n else\n alt_19 = 2\n end\n elsif ( look_19_0 == 0x3c )\n look_19_2 = @input.peek( 2 )\n\n if ( look_19_2 == 0x3c )\n alt_19 = 3\n else\n alt_19 = 4\n end\n else\n raise NoViableAlternative( \"\", 19, 0 )\n end\n case alt_19\n when 1\n # at line 59:18: '>>'\n match( \">>\" )\n\n when 2\n # at line 59:25: '>'\n match( 0x3e )\n\n when 3\n # at line 59:31: '<<'\n match( \"<<\" )\n\n when 4\n # at line 59:38: '<'\n match( 0x3c )\n\n end\n # at line 59:43: ( '&' ( '0' .. '9' )+ )?\n alt_21 = 2\n look_21_0 = @input.peek( 1 )\n\n if ( look_21_0 == 0x26 )\n alt_21 = 1\n end\n case alt_21\n when 1\n # at line 59:44: '&' ( '0' .. '9' )+\n match( 0x26 )\n # at file 59:48: ( '0' .. '9' )+\n match_count_20 = 0\n while true\n alt_20 = 2\n look_20_0 = @input.peek( 1 )\n\n if ( look_20_0.between?( 0x30, 0x39 ) )\n alt_20 = 1\n\n end\n case alt_20\n when 1\n # at line 59:49: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_20 > 0 and break\n eee = EarlyExit(20)\n\n\n raise eee\n end\n match_count_20 += 1\n end\n\n\n end\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end",
"def t__11!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 2)\n\n type = T__11\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 17:9: '::'\n match(\"::\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 2)\n\n end",
"def hex_digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n \n # - - - - main rule block - - - -\n # at line 291:13: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )\n if @input.peek( 1 ).between?( 0x30, 0x39 ) || @input.peek( 1 ).between?( 0x41, 0x46 ) || @input.peek( 1 ).between?( 0x61, 0x66 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end",
"def consume_numeric\n number = consume_number\n repr = number[0]\n value = number[1]\n type = number[2]\n\n if type == :integer\n value = value.to_i\n else\n value = value.to_f\n end\n\n if start_identifier?(@s.peek(3))\n create_token(:dimension,\n :repr => repr,\n :type => type,\n :unit => consume_name,\n :value => value)\n\n elsif @s.peek == '%'\n @s.consume\n\n create_token(:percentage,\n :repr => repr,\n :type => type,\n :value => value)\n\n else\n create_token(:number,\n :repr => repr,\n :type => type,\n :value => value)\n end\n end",
"def t__33!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n\n\n type = T__33\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 10:9: '&&'\n match( \"&&\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n\n end",
"def t__18!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n type = T__18\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 27:9: 'L'\n match( 0x4c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n end",
"def t__14!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = T__14\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: 'U'\n match( 0x55 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def consume_number; end",
"def t__30!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n\n\n type = T__30\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 7:9: '!'\n match( 0x21 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n\n end",
"def t__44!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 33 )\n\n type = T__44\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 49:9: 'V'\n match( 0x56 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 33 )\n\n end"
] | [
"0.7435637",
"0.71241164",
"0.68142223",
"0.6804753",
"0.67911065",
"0.664575",
"0.66360193",
"0.66350996",
"0.6434702",
"0.63688004",
"0.6323316",
"0.62384313",
"0.6213848",
"0.62036186",
"0.6157369",
"0.60919815",
"0.60741335",
"0.6072288",
"0.597126",
"0.5920029",
"0.5915701",
"0.58756405",
"0.58450454",
"0.5843708",
"0.58133966",
"0.5788083",
"0.5787471",
"0.5778506",
"0.5750298",
"0.5727403",
"0.5718271",
"0.57114625",
"0.57004184",
"0.56846553",
"0.5673325",
"0.5651912",
"0.5624838",
"0.5621845",
"0.5620147",
"0.56185263",
"0.5603607",
"0.5595552",
"0.55862135",
"0.55847055",
"0.55780756",
"0.5561995",
"0.55571234",
"0.5537169",
"0.55316377",
"0.55182076",
"0.55157256",
"0.55084735",
"0.5501966",
"0.549957",
"0.54978234",
"0.549579",
"0.54948133",
"0.545214",
"0.54461884",
"0.54356444",
"0.5419755",
"0.54057384",
"0.5404265",
"0.5399136",
"0.53970116",
"0.53877914",
"0.5380279",
"0.53751594",
"0.5372925",
"0.5366317",
"0.5363882",
"0.535522",
"0.5355025",
"0.5348614",
"0.53411573",
"0.5338251",
"0.5326598",
"0.53209406",
"0.5320701",
"0.5317509",
"0.53114325",
"0.53069794",
"0.530653",
"0.53054935",
"0.53046",
"0.5302219",
"0.53021973",
"0.5299152",
"0.5292331",
"0.5292077",
"0.529101",
"0.5274194",
"0.5272548",
"0.52722484",
"0.52674055",
"0.5256898",
"0.5256578",
"0.5254553",
"0.5254325",
"0.5245995"
] | 0.7509113 | 0 |
lexer rule space! (SPACE) (in CSV.g) | def space!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 3 )
type = SPACE
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 12:9: ( ' ' )+
# at file 12:9: ( ' ' )+
match_count_2 = 0
while true
alt_2 = 2
look_2_0 = @input.peek( 1 )
if ( look_2_0 == 0x20 )
alt_2 = 1
end
case alt_2
when 1
# at line 12:9: ' '
match( 0x20 )
else
match_count_2 > 0 and break
eee = EarlyExit(2)
raise eee
end
match_count_2 += 1
end
# --> action
channel = HIDDEN
# <-- action
@state.type = type
@state.channel = channel
ensure
# -> uncomment the next line to manually enable rule tracing
# trace_out( __method__, 3 )
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delim!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 13 )\n\n\n\n type = DELIM\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 305:3: '|'\n match( 0x7c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 13 )\n\n\n end",
"def scan_comma_spaces; end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 151:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 43 )\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 368:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 43 )\n\n end",
"def comma!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n\n\n type = COMMA\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 57:8: ','\n match( 0x2c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n\n end",
"def whitespace!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 45 )\n\n\n\n type = WHITESPACE\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 66:14: ( '\\\\t' | '\\\\f' | '\\\\n' | '\\\\r' | ' ' | '\\\\u00A0' )+\n # at file 66:14: ( '\\\\t' | '\\\\f' | '\\\\n' | '\\\\r' | ' ' | '\\\\u00A0' )+\n match_count_9 = 0\n while true\n alt_9 = 2\n look_9_0 = @input.peek( 1 )\n\n if ( look_9_0.between?( 0x9, 0xa ) || look_9_0.between?( 0xc, 0xd ) || look_9_0 == 0x20 || look_9_0 == 0xa0 )\n alt_9 = 1\n\n end\n case alt_9\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x9, 0xa ) || @input.peek( 1 ).between?( 0xc, 0xd ) || @input.peek(1) == 0x20 || @input.peek(1) == 0xa0\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_9 > 0 and break\n eee = EarlyExit(9)\n\n\n raise eee\n end\n match_count_9 += 1\n end\n\n\n\n # --> action\n channel=HIDDEN; \n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 45 )\n\n\n end",
"def scan_for_commas(token); end",
"def space!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = SPACE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 28:5: WS\n ws!\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def scan_for_space(token); end",
"def test_whitespace1\n token, value, rest = @c.lex(\" hello there \")\n assert_equal(:identifier, token)\n assert_equal('hello', value)\n assert_equal(' there ', rest)\n end",
"def parse_whitespace\n @lexer.next! while @lexer.get and @lexer.get.type == :whitespace\n true\n end",
"def space()\n # pocitanie riadkov\n self.line.push position if /\\n/ === data[position]\n /\\s/ === data[position]\n\n end",
"def scan_whitespace\n\t\t\tappend_scan(/[\\t\\x20]+/, :Spacing, nil)\n\t\tend",
"def test_whitespace2\n token, value, rest = @c.lex(\" \\n hello there \")\n assert_equal(:identifier, token)\n assert_equal('hello', value)\n assert_equal(' there ', rest)\n end",
"def skip_space; end",
"def t__80!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 25)\n\n type = T__80\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 31:9: ','\n match(?,)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 25)\n\n end",
"def skip_space=(_arg0); end",
"def generalized_delimiter!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 110 )\n\n \n # - - - - main rule block - - - -\n # at line 991:3: ( NESTED_PARENTHESES | NESTED_BRACKETS | NESTED_BRACES | NESTED_ANGLES | ( '!' | '@' | '~' | '`' | '^' | '&' | '*' | '-' | '+' | '=' | '|' | ':' | ';' | '.' | ',' | '?' | '/' | '\\\"' | '\\\\'' ) )\n alt_65 = 5\n case look_65 = @input.peek( 1 )\n when 0x28 then alt_65 = 1\n when 0x5b then alt_65 = 2\n when 0x7b then alt_65 = 3\n when 0x3c then alt_65 = 4\n when 0x21, 0x22, 0x26, 0x27, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x3a, 0x3b, 0x3d, 0x3f, 0x40, 0x5e, 0x60, 0x7c, 0x7e then alt_65 = 5\n else\n raise NoViableAlternative( \"\", 65, 0 )\n end\n case alt_65\n when 1\n # at line 991:5: NESTED_PARENTHESES\n nested_parentheses!\n\n when 2\n # at line 992:5: NESTED_BRACKETS\n nested_brackets!\n\n when 3\n # at line 993:5: NESTED_BRACES\n nested_braces!\n\n when 4\n # at line 994:5: NESTED_ANGLES\n nested_angles!\n\n when 5\n # at line 995:5: ( '!' | '@' | '~' | '`' | '^' | '&' | '*' | '-' | '+' | '=' | '|' | ':' | ';' | '.' | ',' | '?' | '/' | '\\\"' | '\\\\'' )\n if @input.peek( 1 ).between?( 0x21, 0x22 ) || @input.peek( 1 ).between?( 0x26, 0x27 ) || @input.peek( 1 ).between?( 0x2a, 0x2f ) || @input.peek( 1 ).between?( 0x3a, 0x3b ) || @input.peek(1) == 0x3d || @input.peek( 1 ).between?( 0x3f, 0x40 ) || @input.peek(1) == 0x5e || @input.peek(1) == 0x60 || @input.peek(1) == 0x7c || @input.peek(1) == 0x7e\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # --> action\n scan_until_delimiter( @input.peek( -1 ) ) \n # <-- action\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 110 )\n\n end",
"def skip_white_spaces\n while char == \" \" || char == \"\\n\" do\n go_next\n end\n end",
"def space_around_comma!\n substitute!(/[[:space:]]*,[[:space:]]*/, ',\n ')\n end",
"def skip_whitespace()\n current = @marker.character\n (current = read_next()) while current == ' ' || current == ?\\t || current == ?\\r\n end",
"def _space\n\n begin # choice\n _tmp = match_string(\" \")\n break if _tmp\n _tmp = match_string(\"\\t\")\n break if _tmp\n _tmp = apply(:_eol)\n end while false # end choice\n\n set_failed_rule :_space unless _tmp\n return _tmp\n end",
"def semicolon!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n type = SEMICOLON\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 171:4: ';'\n match( 0x3b )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end",
"def semicolon!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 46 )\n\n type = SEMICOLON\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 372:4: ';'\n match( 0x3b )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 46 )\n\n end",
"def check_missing_space(tokens, ix, grammar_path)\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 15 )\n\n \n # - - - - main rule block - - - -\n # at line 74:5: ( ' ' | '\\\\t' )+\n # at file 74:5: ( ' ' | '\\\\t' )+\n match_count_24 = 0\n while true\n alt_24 = 2\n look_24_0 = @input.peek( 1 )\n\n if ( look_24_0 == 0x9 || look_24_0 == 0x20 )\n alt_24 = 1\n\n end\n case alt_24\n when 1\n # at line \n if @input.peek(1) == 0x9 || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_24 > 0 and break\n eee = EarlyExit(24)\n\n\n raise eee\n end\n match_count_24 += 1\n end\n\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 15 )\n\n end",
"def whitespace_token\n return if !(match = @chunk.match(WHITESPACE)) || (@chunk[0] == \"\\n\")\n prev = @tokens[-1]\n prev.send(match ? :spaced= : :new_line=, true) if prev\n match ? match[0].size : 0\n end",
"def comilla!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n\n\n type = COMILLA\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 317:3: '\\\\''\n match( 0x27 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n\n end",
"def parse_whitespace\n match_regexp(/[ \\t]/)\n end",
"def semi!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 34 )\n\n type = SEMI\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 155:8: ';'\n match( 0x3b )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 34 )\n\n end",
"def tokens!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 59)\n\n type = TOKENS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 596:4: 'tokens' WS_LOOP '{'\n match(\"tokens\")\n ws_loop!\n match(?{)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 59)\n\n end",
"def _space\n\n _save = self.pos\n while true # choice\n _tmp = match_string(\" \")\n break if _tmp\n self.pos = _save\n _tmp = match_string(\"\\t\")\n break if _tmp\n self.pos = _save\n _tmp = apply(:_eol)\n break if _tmp\n self.pos = _save\n break\n end # end choice\n\n set_failed_rule :_space unless _tmp\n return _tmp\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 7 )\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 39:6: ( ' ' | '\\\\t' | '\\\\r' | '\\\\n' )+\n # at file 39:6: ( ' ' | '\\\\t' | '\\\\r' | '\\\\n' )+\n match_count_3 = 0\n while true\n alt_3 = 2\n look_3_0 = @input.peek( 1 )\n\n if ( look_3_0.between?( 0x9, 0xa ) || look_3_0 == 0xd || look_3_0 == 0x20 )\n alt_3 = 1\n\n end\n case alt_3\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x9, 0xa ) || @input.peek(1) == 0xd || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_3 > 0 and break\n eee = EarlyExit(3)\n\n\n raise eee\n end\n match_count_3 += 1\n end\n\n # --> action\n skip();\n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 7 )\n\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 16)\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 153:5: ( ' ' | '\\\\t' | '\\\\n' | '\\\\r' | '\\\\f' )+\n # at file 153:5: ( ' ' | '\\\\t' | '\\\\n' | '\\\\r' | '\\\\f' )+\n match_count_11 = 0\n while true\n alt_11 = 2\n look_11_0 = @input.peek(1)\n\n if (look_11_0.between?(?\\t, ?\\n) || look_11_0.between?(?\\f, ?\\r) || look_11_0 == ?\\s) \n alt_11 = 1\n\n end\n case alt_11\n when 1\n # at line \n if @input.peek(1).between?(?\\t, ?\\n) || @input.peek(1).between?(?\\f, ?\\r) || @input.peek(1) == ?\\s\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n else\n match_count_11 > 0 and break\n eee = EarlyExit(11)\n\n\n raise eee\n end\n match_count_11 += 1\n end\n\n # --> action\n skip \n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 16)\n\n end",
"def char!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 30 )\n\n\n\n type = CHAR\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 203:5: '\\\\'' . '\\\\''\n match( 0x27 )\n match_any\n match( 0x27 )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 30 )\n\n\n end",
"def delimiter(col)\n gmu(\"# \", col)\n end",
"def skip_whitespace\n self.advance while self.current == \" \"\n end",
"def scan_for_colon(token); end",
"def comma\n match(Token.new(:symbol, ','))\n end",
"def t__63!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n type = T__63\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 8:9: '\\\\n'\n match( 0xa )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 38 )\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 137:5: ( ' ' | '\\\\t' )+\n # at file 137:5: ( ' ' | '\\\\t' )+\n match_count_1 = 0\n while true\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0 == 0x9 || look_1_0 == 0x20 )\n alt_1 = 1\n\n end\n case alt_1\n when 1\n # at line \n if @input.peek(1) == 0x9 || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_1 > 0 and break\n eee = EarlyExit(1)\n\n\n raise eee\n end\n match_count_1 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 38 )\n\n end",
"def nextWhite()\r\n str = \"\"\r\n while /\\s/.match?(@c)\r\n str += @c\r\n nextCh()\r\n end\r\n \r\n return Token.new(Token::WHITESPACE, str)\r\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 54 )\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 353:5: ( ' ' | '\\\\t' )+\n # at file 353:5: ( ' ' | '\\\\t' )+\n match_count_5 = 0\n while true\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0 == 0x9 || look_5_0 == 0x20 )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line \n if @input.peek(1) == 0x9 || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 54 )\n\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 353:9: ( ' ' | '\\\\t' | '\\\\r' | '\\\\n' )\n if @input.peek( 1 ).between?( 0x9, 0xa ) || @input.peek(1) == 0xd || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n # --> action\n channel=HIDDEN;\n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end",
"def synpred10_Jejune\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 96 )\n\n # at line 790:7: ','\n match( COMMA, TOKENS_FOLLOWING_COMMA_IN_synpred10_Jejune_5611 )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 96 )\n\n end",
"def skip_whitespace\n while @char =~ /[\\s,;#]/\n # Comments begin with a semicolon and extend to the end of the line\n # Treat #! as a comment for shebang lines\n if @char == ';' || (@char == '#' && peek_char == '!')\n while @char && @char != \"\\n\"\n next_char\n end\n elsif @char == '#'\n break unless peek_char == '_'\n next_char; next_char # skip #_\n skip_whitespace\n incomplete_error \"Unexpected end of program after #_, expected a form\" unless @char\n parse_form # discard next form\n else\n next_char\n end\n end\n end",
"def _space\n\n _save = self.pos\n while true # choice\n _tmp = match_string(\" \")\n break if _tmp\n self.pos = _save\n _tmp = match_string(\"\\\\f\")\n break if _tmp\n self.pos = _save\n _tmp = match_string(\"\\\\v\")\n break if _tmp\n self.pos = _save\n _tmp = match_string(\"\\\\t\")\n break if _tmp\n self.pos = _save\n break\n end # end choice\n\n set_failed_rule :_space unless _tmp\n return _tmp\n end",
"def lexer_regexp; LEXER_REGEXP end",
"def semi!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 38 )\n\n\n\n type = SEMI\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 59:7: ';'\n match( 0x3b )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 38 )\n\n\n end",
"def pipe_asgn!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 81 )\n\n type = PIPE_ASGN\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 202:13: '|='\n match( \"|=\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 81 )\n\n end",
"def lex_en_plain_backslash_delimited=(_arg0); end",
"def lex_en_plain_backslash_delimited=(_arg0); end",
"def lex_en_plain_backslash_delimited=(_arg0); end",
"def _SPACE\n _tmp = match_string(\" \")\n set_failed_rule :_SPACE unless _tmp\n return _tmp\n end",
"def tokenize_concatenation_rule(rule)\r\n self.replace tokenize_concatenation_rule_recur(rule, self.clone)\r\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 8)\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 36:5: ( ' ' | '\\\\t' | '\\\\r' | '\\\\n' )+\n # at file 36:5: ( ' ' | '\\\\t' | '\\\\r' | '\\\\n' )+\n match_count_15 = 0\n loop do\n alt_15 = 2\n look_15_0 = @input.peek(1)\n\n if (look_15_0.between?(?\\t, ?\\n) || look_15_0 == ?\\r || look_15_0 == ?\\s) \n alt_15 = 1\n\n end\n case alt_15\n when 1\n # at line \n if @input.peek(1).between?(?\\t, ?\\n) || @input.peek(1) == ?\\r || @input.peek(1) == ?\\s\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n else\n match_count_15 > 0 and break\n eee = EarlyExit(15)\n\n\n raise eee\n end\n match_count_15 += 1\n end\n\n # --> action\n channel = HIDDEN \n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 8)\n\n end",
"def lex_comment line\n # do nothing\n end",
"def spaces; end",
"def spaces; end",
"def lex_en_plain_backslash_delimited; end",
"def lex_en_plain_backslash_delimited; end",
"def lex_en_plain_backslash_delimited; end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 4)\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 19:9: '::='\n match(\"::=\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 4)\n\n end",
"def t__16!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 7)\n\n type = T__16\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ','\n match(?,)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 7)\n\n end",
"def colon!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 18 )\n\n type = COLON\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 152:9: ':'\n match( 0x3a )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 18 )\n\n end",
"def tokenize ; end",
"def tokenize ; end",
"def scan_for_dash(token); end",
"def quote!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 32 )\n\n type = QUOTE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 166:9: '\\\"'\n match( 0x22 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 32 )\n\n end",
"def colon!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 45 )\n\n type = COLON\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 370:9: ':'\n match( 0x3a )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 45 )\n\n end",
"def colon!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 27 )\n\n type = COLON\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 148:9: ':'\n match( 0x3a )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 27 )\n\n end",
"def t__40!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 11 )\n\n\n\n type = T__40\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 17:9: ','\n match( 0x2c )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 11 )\n\n\n end",
"def t__12!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 3)\n\n type = T__12\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 3)\n\n end",
"def quote!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 42 )\n\n type = QUOTE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 367:9: '\\\"'\n match( 0x22 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 42 )\n\n end",
"def space(*tokens)\n spacing_variants(tokens + ['']).uniq\nend",
"def lexeme_delimiter?(pos)\n @line[pos] == '!' || @line[pos] == ',' ||\n @line[pos] == \"\\n\" || space?(@line[pos]) ||\n @line[pos] == '…' || @line[pos, 3] == '...'\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 61)\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 607:6: ( ' ' | '\\\\t' | ( '\\\\r' )? '\\\\n' )+\n # at file 607:6: ( ' ' | '\\\\t' | ( '\\\\r' )? '\\\\n' )+\n match_count_19 = 0\n loop do\n alt_19 = 4\n case look_19 = @input.peek(1)\n when ?\\s then alt_19 = 1\n when ?\\t then alt_19 = 2\n when ?\\n, ?\\r then alt_19 = 3\n end\n case alt_19\n when 1\n # at line 607:8: ' '\n match(?\\s)\n\n when 2\n # at line 608:5: '\\\\t'\n match(?\\t)\n\n when 3\n # at line 609:5: ( '\\\\r' )? '\\\\n'\n # at line 609:5: ( '\\\\r' )?\n alt_18 = 2\n look_18_0 = @input.peek(1)\n\n if (look_18_0 == ?\\r) \n alt_18 = 1\n end\n case alt_18\n when 1\n # at line 609:5: '\\\\r'\n match(?\\r)\n\n end\n match(?\\n)\n\n else\n match_count_19 > 0 and break\n eee = EarlyExit(19)\n\n\n raise eee\n end\n match_count_19 += 1\n end\n\n # --> action\n channel=HIDDEN\n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 61)\n\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 77 )\n\n\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 604:6: ( ' ' | '\\\\r' | '\\\\t' | '\\\\u000C' | '\\\\n' )\n if @input.peek( 1 ).between?( 0x9, 0xa ) || @input.peek( 1 ).between?( 0xc, 0xd ) || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n # --> action\n channel=HIDDEN;\n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 77 )\n\n\n end",
"def t__13!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = T__13\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 9:9: ')'\n match( 0x29 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def operator!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 1)\n\n type = OPERATOR\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line \n if @input.peek(1).between?(?*, ?+) || @input.peek(1) == ?- || @input.peek(1) == ?/\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 1)\n\n end",
"def suppress_newlines\n @tokens.pop if value[0] == ?\\\n end",
"def _sep\n\n _save = self.pos\n while true # sequence\n\n _save1 = self.pos\n while true # choice\n _tmp = apply(:_end_hyphen_of_hyphen_line)\n break if _tmp\n self.pos = _save1\n _tmp = apply(:_comma)\n break if _tmp\n self.pos = _save1\n break\n end # end choice\n\n unless _tmp\n self.pos = _save\n break\n end\n while true\n\n _save3 = self.pos\n while true # choice\n _tmp = apply(:_space)\n break if _tmp\n self.pos = _save3\n _tmp = apply(:_comment)\n break if _tmp\n self.pos = _save3\n _tmp = apply(:_end_hyphen_of_hyphen_line)\n break if _tmp\n self.pos = _save3\n _tmp = apply(:_comma)\n break if _tmp\n self.pos = _save3\n break\n end # end choice\n\n break unless _tmp\n end\n _tmp = true\n unless _tmp\n self.pos = _save\n end\n break\n end # end sequence\n\n set_failed_rule :_sep unless _tmp\n return _tmp\n end",
"def squote!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 38 )\n\n type = SQUOTE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 173:10: '\\\\''\n match( 0x27 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 38 )\n\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 48 )\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 185:12: ( '\\\\t' | ' ' | '\\\\r' | '\\\\n' | '\\\\u000C' )+\n # at file 185:12: ( '\\\\t' | ' ' | '\\\\r' | '\\\\n' | '\\\\u000C' )+\n match_count_4 = 0\n while true\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x9, 0xa ) || look_4_0.between?( 0xc, 0xd ) || look_4_0 == 0x20 )\n alt_4 = 1\n\n end\n case alt_4\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x9, 0xa ) || @input.peek( 1 ).between?( 0xc, 0xd ) || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_4 > 0 and break\n eee = EarlyExit(4)\n\n\n raise eee\n end\n match_count_4 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 48 )\n\n end",
"def consume!\n empty_line ||\n name_token ||\n comment_token ||\n whitespace_token ||\n line_token ||\n heredoc_token ||\n string_token ||\n number_token ||\n regex_token ||\n literal_token\n end",
"def yylex # 826 lines\n c = ''\n self.space_seen = false\n command_state = false\n src = self.src\n\n self.token = nil\n self.yacc_value = nil\n\n return yylex_string if lex_strterm\n\n command_state = self.command_start\n self.command_start = false\n\n last_state = lex_state\n\n loop do # START OF CASE\n if src.scan(/[\\ \\t\\r\\f\\v]/) then # \\s - \\n + \\v\n self.space_seen = true\n next\n elsif src.check(/[^a-zA-Z]/) then\n if src.scan(/\\n|#/) then\n self.lineno = nil\n c = src.matched\n if c == '#' then\n src.pos -= 1\n\n while src.scan(/\\s*#.*(\\n+|\\z)/) do\n @comments << src.matched.gsub(/^ +#/, '#').gsub(/^ +$/, '')\n end\n\n return RubyLexer::EOF if src.eos?\n end\n\n # Replace a string of newlines with a single one\n src.scan(/\\n+/)\n\n next if in_lex_state?(:expr_beg, :expr_value, :expr_class,\n :expr_fname, :expr_dot)\n\n if src.scan(/([\\ \\t\\r\\f\\v]*)\\./) then\n self.space_seen = true unless src[1].empty?\n\n src.pos -= 1\n next unless src.check(/\\.\\./)\n end\n\n self.command_start = true\n self.lex_state = :expr_beg\n return :tNL\n elsif src.scan(/[\\]\\)\\}]/) then\n if src.matched == \"}\" then\n self.brace_nest -= 1\n else\n self.paren_nest -= 1\n end\n\n cond.lexpop\n cmdarg.lexpop\n tern.lexpop\n\n self.lex_state = if src.matched == \")\" then\n :expr_endfn\n else\n :expr_endarg\n end\n\n self.yacc_value = src.matched\n result = {\n \")\" => :tRPAREN,\n \"]\" => :tRBRACK,\n \"}\" => :tRCURLY\n }[src.matched]\n return result\n elsif src.scan(/\\!/) then\n if in_lex_state?(:expr_fname, :expr_dot) then\n self.lex_state = :expr_arg\n\n if src.scan(/@/) then\n self.yacc_value = \"!@\"\n return :tUBANG\n end\n else\n self.lex_state = :expr_beg\n end\n\n if src.scan(/[=~]/) then\n self.yacc_value = \"!#{src.matched}\"\n else\n self.yacc_value = \"!\"\n end\n\n return TOKENS[self.yacc_value]\n elsif src.scan(/\\.\\.\\.?|,|![=~]?/) then\n self.lex_state = :expr_beg\n tok = self.yacc_value = src.matched\n return TOKENS[tok]\n elsif src.check(/\\./) then\n if src.scan(/\\.\\d/) then\n rb_compile_error \"no .<digit> floating literal anymore put 0 before dot\"\n elsif src.scan(/\\./) then\n self.lex_state = :expr_dot\n self.yacc_value = \".\"\n return :tDOT\n end\n elsif src.scan(/\\(/) then\n result = if ruby18 then\n yylex_paren18\n else\n yylex_paren19\n end\n\n self.paren_nest += 1\n\n self.expr_beg_push \"(\"\n\n return result\n elsif src.check(/\\=/) then\n if src.scan(/\\=\\=\\=|\\=\\=|\\=~|\\=>|\\=(?!begin\\b)/) then\n self.fix_arg_lex_state\n tok = self.yacc_value = src.matched\n return TOKENS[tok]\n elsif src.scan(/\\=begin(?=\\s)/) then\n @comments << src.matched\n\n unless src.scan(/.*?\\n=end( |\\t|\\f)*[^\\n]*(\\n|\\z)/m) then\n @comments.clear\n rb_compile_error(\"embedded document meets end of file\")\n end\n\n @comments << src.matched\n\n next\n else\n raise \"you shouldn't be able to get here\"\n end\n elsif src.scan(/\\\"(#{ESC_RE}|#(#{ESC_RE}|[^\\{\\#\\@\\$\\\"\\\\])|[^\\\"\\\\\\#])*\\\"/o) then\n self.yacc_value = src.matched[1..-2].gsub(ESC_RE) { unescape $1 }\n self.lex_state = :expr_end\n return :tSTRING\n elsif src.scan(/\\\"/) then # FALLBACK\n self.lex_strterm = [:strterm, STR_DQUOTE, '\"', \"\\0\"] # TODO: question this\n self.yacc_value = \"\\\"\"\n return :tSTRING_BEG\n elsif src.scan(/\\@\\@?#{IDENT_CHAR_RE}+/o) then\n self.token = src.matched\n\n rb_compile_error \"`#{token}` is not allowed as a variable name\" if\n token =~ /\\@\\d/\n\n return process_token(command_state)\n elsif src.scan(/\\:\\:/) then\n if is_beg? 
|| in_lex_state?(:expr_class) || is_space_arg? then\n self.lex_state = :expr_beg\n self.yacc_value = \"::\"\n return :tCOLON3\n end\n\n self.lex_state = :expr_dot\n self.yacc_value = \"::\"\n return :tCOLON2\n elsif ! is_end? && src.scan(/:([a-zA-Z_]#{IDENT_CHAR_RE}*(?:[?!]|=(?==>)|=(?![=>]))?)/) then\n # scanning shortcut to symbols\n self.yacc_value = src[1]\n self.lex_state = :expr_end\n return :tSYMBOL\n elsif src.scan(/\\:/) then\n # ?: / then / when\n if is_end? || src.check(/\\s/) then\n self.lex_state = :expr_beg\n # TODO warn_balanced(\":\", \"symbol literal\");\n self.yacc_value = \":\"\n return :tCOLON\n end\n\n case\n when src.scan(/\\'/) then\n self.lex_strterm = [:strterm, STR_SSYM, src.matched, \"\\0\"]\n when src.scan(/\\\"/) then\n self.lex_strterm = [:strterm, STR_DSYM, src.matched, \"\\0\"]\n end\n\n self.lex_state = :expr_fname\n self.yacc_value = \":\"\n return :tSYMBEG\n elsif src.check(/[0-9]/) then\n return parse_number\n elsif src.scan(/\\[/) then\n self.paren_nest += 1\n\n result = src.matched\n\n if in_lex_state? :expr_fname, :expr_dot then\n self.lex_state = :expr_arg\n case\n when src.scan(/\\]\\=/) then\n self.paren_nest -= 1 # HACK? I dunno, or bug in MRI\n self.yacc_value = \"[]=\"\n return :tASET\n when src.scan(/\\]/) then\n self.paren_nest -= 1 # HACK? I dunno, or bug in MRI\n self.yacc_value = \"[]\"\n return :tAREF\n else\n rb_compile_error \"unexpected '['\"\n end\n elsif is_beg? then\n self.tern.push false\n result = :tLBRACK\n elsif is_arg? && space_seen then\n self.tern.push false\n result = :tLBRACK\n else\n result = :tLBRACK2\n end\n\n self.expr_beg_push \"[\"\n\n return result\n elsif src.scan(/\\'(\\\\.|[^\\'])*\\'/) then\n self.yacc_value = src.matched[1..-2].gsub(/\\\\\\\\/, \"\\\\\").gsub(/\\\\'/, \"'\") # \"\n self.lex_state = :expr_end\n return :tSTRING\n elsif src.check(/\\|/) then\n if src.scan(/\\|\\|\\=/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"||\"\n return :tOP_ASGN\n elsif src.scan(/\\|\\|/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"||\"\n return :tOROP\n elsif src.scan(/\\|\\=/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"|\"\n return :tOP_ASGN\n elsif src.scan(/\\|/) then\n self.fix_arg_lex_state\n self.yacc_value = \"|\"\n return :tPIPE\n end\n elsif src.scan(/\\{/) then\n self.brace_nest += 1\n if lpar_beg && lpar_beg == paren_nest then\n self.lpar_beg = nil\n self.paren_nest -= 1\n\n expr_beg_push \"{\"\n\n return :tLAMBEG\n end\n\n result = if is_arg? || in_lex_state?(:expr_end, :expr_endfn) then\n :tLCURLY # block (primary)\n elsif in_lex_state?(:expr_endarg) then\n :tLBRACE_ARG # block (expr)\n else\n self.tern.push false\n :tLBRACE # hash\n end\n\n self.expr_beg_push \"{\"\n self.command_start = true unless result == :tLBRACE\n\n return result\n elsif src.scan(/->/) then\n self.lex_state = :expr_endfn\n return :tLAMBDA\n elsif src.scan(/[+-]/) then\n sign = src.matched\n utype, type = if sign == \"+\" then\n [:tUPLUS, :tPLUS]\n else\n [:tUMINUS, :tMINUS]\n end\n\n if in_lex_state? :expr_fname, :expr_dot then\n self.lex_state = :expr_arg\n if src.scan(/@/) then\n self.yacc_value = \"#{sign}@\"\n return utype\n else\n self.yacc_value = sign\n return type\n end\n end\n\n if src.scan(/\\=/) then\n self.lex_state = :expr_beg\n self.yacc_value = sign\n return :tOP_ASGN\n end\n\n if (is_beg? || (is_arg? && space_seen && !src.check(/\\s/))) then\n if is_arg? 
then\n arg_ambiguous\n end\n\n self.lex_state = :expr_beg\n self.yacc_value = sign\n\n if src.check(/\\d/) then\n if utype == :tUPLUS then\n return self.parse_number\n else\n return :tUMINUS_NUM\n end\n end\n\n return utype\n end\n\n self.lex_state = :expr_beg\n self.yacc_value = sign\n return type\n elsif src.check(/\\*/) then\n if src.scan(/\\*\\*=/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"**\"\n return :tOP_ASGN\n elsif src.scan(/\\*\\*/) then\n result = if is_space_arg? src.check(/./m) then\n warning \"`**' interpreted as argument prefix\"\n :tDSTAR\n elsif is_beg? then\n :tDSTAR\n else\n # TODO: warn_balanced(\"**\", \"argument prefix\");\n :tPOW\n end\n self.yacc_value = \"**\"\n self.fix_arg_lex_state\n return result\n elsif src.scan(/\\*\\=/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"*\"\n return :tOP_ASGN\n elsif src.scan(/\\*/) then\n result = if is_space_arg? src.check(/./m) then\n warning(\"`*' interpreted as argument prefix\")\n :tSTAR\n elsif is_beg? then\n :tSTAR\n else\n # TODO: warn_balanced(\"*\", \"argument prefix\");\n :tSTAR2 # TODO: rename\n end\n\n self.yacc_value = \"*\"\n self.fix_arg_lex_state\n return result\n end\n elsif src.check(/\\</) then\n if src.scan(/\\<\\=\\>/) then\n self.fix_arg_lex_state\n self.yacc_value = \"<=>\"\n return :tCMP\n elsif src.scan(/\\<\\=/) then\n self.fix_arg_lex_state\n self.yacc_value = \"<=\"\n return :tLEQ\n elsif src.scan(/\\<\\<\\=/) then\n self.fix_arg_lex_state\n self.lex_state = :expr_beg\n self.yacc_value = \"\\<\\<\"\n return :tOP_ASGN\n elsif src.scan(/\\<\\</) then\n if (!in_lex_state?(:expr_dot, :expr_class) &&\n !is_end? &&\n (!is_arg? || space_seen)) then\n tok = self.heredoc_identifier\n return tok if tok\n end\n\n self.fix_arg_lex_state\n self.yacc_value = \"\\<\\<\"\n return :tLSHFT\n elsif src.scan(/\\</) then\n self.fix_arg_lex_state\n self.yacc_value = \"<\"\n return :tLT\n end\n elsif src.check(/\\>/) then\n if src.scan(/\\>\\=/) then\n self.fix_arg_lex_state\n self.yacc_value = \">=\"\n return :tGEQ\n elsif src.scan(/\\>\\>=/) then\n self.fix_arg_lex_state\n self.lex_state = :expr_beg\n self.yacc_value = \">>\"\n return :tOP_ASGN\n elsif src.scan(/\\>\\>/) then\n self.fix_arg_lex_state\n self.yacc_value = \">>\"\n return :tRSHFT\n elsif src.scan(/\\>/) then\n self.fix_arg_lex_state\n self.yacc_value = \">\"\n return :tGT\n end\n elsif src.scan(/\\`/) then\n self.yacc_value = \"`\"\n case lex_state\n when :expr_fname then\n self.lex_state = :expr_end\n return :tBACK_REF2\n when :expr_dot then\n self.lex_state = if command_state then\n :expr_cmdarg\n else\n :expr_arg\n end\n return :tBACK_REF2\n end\n self.lex_strterm = [:strterm, STR_XQUOTE, '`', \"\\0\"]\n return :tXSTRING_BEG\n elsif src.scan(/\\?/) then\n\n if is_end? then\n self.lex_state = ruby18 ? :expr_beg : :expr_value # HACK?\n self.tern.push true\n self.yacc_value = \"?\"\n return :tEH\n end\n\n if src.eos? then\n rb_compile_error \"incomplete character syntax\"\n end\n\n if src.check(/\\s|\\v/) then\n unless is_arg? then\n c2 = { \" \" => 's',\n \"\\n\" => 'n',\n \"\\t\" => 't',\n \"\\v\" => 'v',\n \"\\r\" => 'r',\n \"\\f\" => 'f' }[src.matched]\n\n if c2 then\n warning(\"invalid character syntax; use ?\\\\\" + c2)\n end\n end\n\n # ternary\n self.lex_state = ruby18 ? 
:expr_beg : :expr_value # HACK?\n self.tern.push true\n self.yacc_value = \"?\"\n return :tEH\n elsif src.check(/\\w(?=\\w)/) then # ternary, also\n self.lex_state = :expr_beg\n self.tern.push true\n self.yacc_value = \"?\"\n return :tEH\n end\n\n c = if src.scan(/\\\\/) then\n self.read_escape\n else\n src.getch\n end\n self.lex_state = :expr_end\n\n if version == 18 then\n self.yacc_value = c[0].ord & 0xff\n return :tINTEGER\n else\n self.yacc_value = c\n return :tSTRING\n end\n elsif src.check(/\\&/) then\n if src.scan(/\\&\\&\\=/) then\n self.yacc_value = \"&&\"\n self.lex_state = :expr_beg\n return :tOP_ASGN\n elsif src.scan(/\\&\\&/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"&&\"\n return :tANDOP\n elsif src.scan(/\\&\\=/) then\n self.yacc_value = \"&\"\n self.lex_state = :expr_beg\n return :tOP_ASGN\n elsif src.scan(/&/) then\n result = if is_arg? && space_seen &&\n !src.check(/\\s/) then\n warning(\"`&' interpreted as argument prefix\")\n :tAMPER\n elsif in_lex_state? :expr_beg, :expr_mid then\n :tAMPER\n else\n :tAMPER2\n end\n\n self.fix_arg_lex_state\n self.yacc_value = \"&\"\n return result\n end\n elsif src.scan(/\\//) then\n if is_beg? then\n self.lex_strterm = [:strterm, STR_REGEXP, '/', \"\\0\"]\n self.yacc_value = \"/\"\n return :tREGEXP_BEG\n end\n\n if src.scan(/\\=/) then\n self.yacc_value = \"/\"\n self.lex_state = :expr_beg\n return :tOP_ASGN\n end\n\n if is_arg? && space_seen then\n unless src.scan(/\\s/) then\n arg_ambiguous\n self.lex_strterm = [:strterm, STR_REGEXP, '/', \"\\0\"]\n self.yacc_value = \"/\"\n return :tREGEXP_BEG\n end\n end\n\n self.fix_arg_lex_state\n self.yacc_value = \"/\"\n\n return :tDIVIDE\n elsif src.scan(/\\^=/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"^\"\n return :tOP_ASGN\n elsif src.scan(/\\^/) then\n self.fix_arg_lex_state\n self.yacc_value = \"^\"\n return :tCARET\n elsif src.scan(/\\;/) then\n self.command_start = true\n self.lex_state = :expr_beg\n self.yacc_value = \";\"\n return :tSEMI\n elsif src.scan(/\\~/) then\n if in_lex_state? :expr_fname, :expr_dot then\n src.scan(/@/)\n end\n\n self.fix_arg_lex_state\n self.yacc_value = \"~\"\n\n return :tTILDE\n elsif src.scan(/\\\\/) then\n if src.scan(/\\r?\\n/) then\n self.lineno = nil\n self.space_seen = true\n next\n end\n rb_compile_error \"bare backslash only allowed before newline\"\n elsif src.scan(/\\%/) then\n if is_beg? then\n return parse_quote\n end\n\n if src.scan(/\\=/) then\n self.lex_state = :expr_beg\n self.yacc_value = \"%\"\n return :tOP_ASGN\n end\n\n return parse_quote if is_arg? && space_seen && ! 
src.check(/\\s/)\n\n self.fix_arg_lex_state\n self.yacc_value = \"%\"\n\n return :tPERCENT\n elsif src.check(/\\$/) then\n if src.scan(/(\\$_)(\\w+)/) then\n self.lex_state = :expr_end\n self.token = src.matched\n return process_token(command_state)\n elsif src.scan(/\\$_/) then\n self.lex_state = :expr_end\n self.token = src.matched\n self.yacc_value = src.matched\n return :tGVAR\n elsif src.scan(/\\$[~*$?!@\\/\\\\;,.=:<>\\\"]|\\$-\\w?/) then\n self.lex_state = :expr_end\n self.yacc_value = src.matched\n return :tGVAR\n elsif src.scan(/\\$([\\&\\`\\'\\+])/) then\n self.lex_state = :expr_end\n # Explicit reference to these vars as symbols...\n if last_state == :expr_fname then\n self.yacc_value = src.matched\n return :tGVAR\n else\n self.yacc_value = src[1].to_sym\n return :tBACK_REF\n end\n elsif src.scan(/\\$([1-9]\\d*)/) then\n self.lex_state = :expr_end\n if last_state == :expr_fname then\n self.yacc_value = src.matched\n return :tGVAR\n else\n self.yacc_value = src[1].to_i\n return :tNTH_REF\n end\n elsif src.scan(/\\$0/) then\n self.lex_state = :expr_end\n self.token = src.matched\n return process_token(command_state)\n elsif src.scan(/\\$\\W|\\$\\z/) then # TODO: remove?\n self.lex_state = :expr_end\n self.yacc_value = \"$\"\n return \"$\"\n elsif src.scan(/\\$\\w+/)\n self.lex_state = :expr_end\n self.token = src.matched\n return process_token(command_state)\n end\n elsif src.check(/\\_/) then\n if src.beginning_of_line? && src.scan(/\\__END__(\\r?\\n|\\Z)/) then\n self.lineno = nil\n return RubyLexer::EOF\n elsif src.scan(/\\_\\w*/) then\n self.token = src.matched\n return process_token(command_state)\n end\n end\n end # END OF CASE\n\n if src.scan(/\\004|\\032|\\000/) || src.eos? then # ^D, ^Z, EOF\n return RubyLexer::EOF\n else # alpha check\n rb_compile_error \"Invalid char #{src.rest[0].chr} in expression\" unless\n src.check IDENT_RE\n end\n\n self.token = src.matched if self.src.scan IDENT_RE\n\n return process_token(command_state)\n end\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 48 )\n\n\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 227:5: ( ' ' | '\\\\t' )+\n # at file 227:5: ( ' ' | '\\\\t' )+\n match_count_5 = 0\n while true\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0 == 0x9 || look_5_0 == 0x20 )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line \n if @input.peek(1) == 0x9 || @input.peek(1) == 0x20\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n\n # --> action\n channel = HIDDEN\n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 48 )\n\n\n end",
"def ws!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 103 )\n\n type = WS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 928:5: ( '\\\\t' | '\\\\f' | ' ' | '\\\\u00A0' )+\n # at file 928:5: ( '\\\\t' | '\\\\f' | ' ' | '\\\\u00A0' )+\n match_count_32 = 0\n while true\n alt_32 = 2\n look_32_0 = @input.peek( 1 )\n\n if ( look_32_0 == 0x9 || look_32_0 == 0xc || look_32_0 == 0x20 || look_32_0 == 0xa0 )\n alt_32 = 1\n\n end\n case alt_32\n when 1\n # at line \n if @input.peek(1) == 0x9 || @input.peek(1) == 0xc || @input.peek(1) == 0x20 || @input.peek(1) == 0xa0\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n else\n match_count_32 > 0 and break\n eee = EarlyExit(32)\n\n\n raise eee\n end\n match_count_32 += 1\n end\n\n # --> action\n channel = HIDDEN \n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 103 )\n\n end",
"def determine_field_delimiter(l)\n if l =~ /[\\,\\|]/ \n /\\s*[\\,\\|]\\s*/\n else\n /\\s/\n end\n end",
"def _Indent\n _tmp = scan(/\\G(?-mix:\\t| )/)\n set_failed_rule :_Indent unless _tmp\n return _tmp\n end",
"def peek_no_space; end",
"def eol!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n\n\n type = EOL\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 269:3: ';'\n match( 0x3b )\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n\n end",
"def tokenize; end",
"def tokenize; end",
"def t__83!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 28)\n\n type = T__83\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 34:9: ')'\n match(?))\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 28)\n\n end",
"def skip_white_space_or_to_eoln\r\n while (next_char = @source.get)\r\n return next_char if (next_char > ' ') || @source.eoln?\r\n end\r\n end",
"def tokenize input\n setup_scanner input\n\n until @s.eos? do\n pos = @s.pos\n\n # leading spaces will be reflected by the column of the next token\n # the only thing we loose are trailing spaces at the end of the file\n next if @s.scan(/ +/)\n\n # note: after BULLET, LABEL, etc.,\n # indent will be the column of the next non-newline token\n\n @tokens << case\n # [CR]LF => :NEWLINE\n when @s.scan(/\\r?\\n/) then\n token = [:NEWLINE, @s.matched, *pos]\n @s.newline!\n token\n # === text => :HEADER then :TEXT\n when @s.scan(/(=+)(\\s*)/) then\n level = @s[1].length\n header = [:HEADER, level, *pos]\n\n if @s[2] =~ /^\\r?\\n/ then\n @s.unscan(@s[2])\n header\n else\n pos = @s.pos\n @s.scan(/.*/)\n @tokens << header\n [:TEXT, @s.matched.sub(/\\r$/, ''), *pos]\n end\n # --- (at least 3) and nothing else on the line => :RULE\n when @s.scan(/(-{3,}) *\\r?$/) then\n [:RULE, @s[1].length - 2, *pos]\n # * or - followed by white space and text => :BULLET\n when @s.scan(/([*-]) +(\\S)/) then\n @s.unscan(@s[2])\n [:BULLET, @s[1], *pos]\n # A. text, a. text, 12. text => :UALPHA, :LALPHA, :NUMBER\n when @s.scan(/([a-z]|\\d+)\\. +(\\S)/i) then\n # FIXME if tab(s), the column will be wrong\n # either support tabs everywhere by first expanding them to\n # spaces, or assume that they will have been replaced\n # before (and provide a check for that at least in debug\n # mode)\n list_label = @s[1]\n @s.unscan(@s[2])\n list_type =\n case list_label\n when /[a-z]/ then :LALPHA\n when /[A-Z]/ then :UALPHA\n when /\\d/ then :NUMBER\n else\n raise ParseError, \"BUG token #{list_label}\"\n end\n [list_type, list_label, *pos]\n # [text] followed by spaces or end of line => :LABEL\n when @s.scan(/\\[(.*?)\\]( +|\\r?$)/) then\n [:LABEL, @s[1], *pos]\n # text:: followed by spaces or end of line => :NOTE\n when @s.scan(/(.*?)::( +|\\r?$)/) then\n [:NOTE, @s[1], *pos]\n # >>> followed by end of line => :BLOCKQUOTE\n when @s.scan(/>>> *(\\w+)?$/) then\n [:BLOCKQUOTE, @s[1], *pos]\n # anything else: :TEXT\n else\n @s.scan(/(.*?)( )?\\r?$/)\n token = [:TEXT, @s[1], *pos]\n\n if @s[2] then\n @tokens << token\n [:BREAK, @s[2], pos[0] + @s[1].length, pos[1]]\n else\n token\n end\n end\n end\n\n self\n end",
"def char\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 59 )\n\n\n return_value = CharReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n\n root_0 = nil\n\n __COMILLA321__ = nil\n set322 = nil\n __COMILLA323__ = nil\n\n\n tree_for_COMILLA321 = nil\n tree_for_set322 = nil\n tree_for_COMILLA323 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 569:10: COMILLA ( EscapeSequence |~ ( '\\\\'' | '\\\\\\\\' ) ) COMILLA\n __COMILLA321__ = match( COMILLA, TOKENS_FOLLOWING_COMILLA_IN_char_2781 )\n if @state.backtracking == 0\n tree_for_COMILLA321 = @adaptor.create_with_payload( __COMILLA321__ )\n @adaptor.add_child( root_0, tree_for_COMILLA321 )\n\n end\n\n\n set322 = @input.look\n\n if @input.peek( 1 ).between?( ASIGNACION, COMA ) || @input.peek( 1 ).between?( CORDER, WS ) || @input.peek( 1 ).between?( T__81, T__82 )\n @input.consume\n if @state.backtracking == 0\n @adaptor.add_child( root_0, @adaptor.create_with_payload( set322 ) )\n end\n\n @state.error_recovery = false\n\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n\n mse = MismatchedSet( nil )\n raise mse\n\n end\n\n\n __COMILLA323__ = match( COMILLA, TOKENS_FOLLOWING_COMILLA_IN_char_2798 )\n if @state.backtracking == 0\n tree_for_COMILLA323 = @adaptor.create_with_payload( __COMILLA323__ )\n @adaptor.add_child( root_0, tree_for_COMILLA323 )\n\n end\n\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n if @state.backtracking == 0\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 59 )\n\n\n end\n\n return return_value\n end",
"def space; txt ' ','' end",
"def st_lex(raw)\n @tokens = raw.delete(' ').gsub(\"\\n\", '')\n end"
] | [
"0.72857696",
"0.7151333",
"0.7031983",
"0.7002252",
"0.6868628",
"0.67804253",
"0.6627531",
"0.6617852",
"0.6569228",
"0.6249484",
"0.6244152",
"0.62422854",
"0.6228803",
"0.62190044",
"0.61122173",
"0.6088007",
"0.60851634",
"0.597342",
"0.5960626",
"0.59551775",
"0.5930669",
"0.59277767",
"0.59161854",
"0.5901218",
"0.5893189",
"0.58631724",
"0.58605105",
"0.58450586",
"0.5835122",
"0.58199894",
"0.58051395",
"0.5792245",
"0.57815206",
"0.5775604",
"0.5770858",
"0.5769408",
"0.5769289",
"0.5740954",
"0.5734984",
"0.57330394",
"0.57303494",
"0.5698911",
"0.5684735",
"0.568082",
"0.56646776",
"0.5645332",
"0.56209135",
"0.56001675",
"0.5596527",
"0.5590323",
"0.55884755",
"0.55884755",
"0.55884755",
"0.557663",
"0.55738974",
"0.5558604",
"0.55584323",
"0.5557589",
"0.5557589",
"0.555492",
"0.555492",
"0.555492",
"0.55517054",
"0.5551526",
"0.55378246",
"0.55362433",
"0.55362433",
"0.5535613",
"0.55347264",
"0.5518071",
"0.551735",
"0.55167156",
"0.5514928",
"0.55131876",
"0.5512804",
"0.5507926",
"0.55073994",
"0.5507196",
"0.55041593",
"0.55019027",
"0.5482313",
"0.5480044",
"0.54797024",
"0.54744977",
"0.5471497",
"0.5467192",
"0.5465023",
"0.5460292",
"0.5459903",
"0.54580015",
"0.5453825",
"0.54272944",
"0.5422858",
"0.5422858",
"0.5419955",
"0.5410791",
"0.5410708",
"0.5406473",
"0.5404153",
"0.54041487"
] | 0.7325442 | 0 |
main rule used to study the input at the current position, and choose the proper lexer rule to call in order to fetch the next token. Usually, you don't make direct calls to this method, but instead use the next_token method, which will build and emit the actual next token | def token!
# at line 1:8: ( T__6 | NUMBER | SPACE )
alt_3 = 3
case look_3 = @input.peek( 1 )
when 0x2b then alt_3 = 1
when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39 then alt_3 = 2
when 0x20 then alt_3 = 3
else
raise NoViableAlternative( "", 3, 0 )
end
case alt_3
when 1
# at line 1:10: T__6
t__6!
when 2
# at line 1:15: NUMBER
number!
when 3
# at line 1:22: SPACE
space!
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def next_token\n\n token = nil\n\n until ss.eos? or token do\n if ss.check(/\\n/) then\n self.lineno += 1\n # line starts 1 position after the newline\n self.start_of_current_line_pos = ss.pos + 1\n end\n self.old_pos = ss.pos\n token =\n case state\n when nil, :option, :inner, :start, :macro, :rule, :group then\n case\n when ss.skip(/options?.*/) then\n [:state, :option]\n when ss.skip(/inner.*/) then\n [:state, :inner]\n when ss.skip(/macros?.*/) then\n [:state, :macro]\n when ss.skip(/rules?.*/) then\n [:state, :rule]\n when ss.skip(/start.*/) then\n [:state, :start]\n when ss.skip(/end/) then\n [:state, :END]\n when ss.skip(/\\A((?:.|\\n)*)class ([\\w:]+.*)/) then\n action { [:class, *matches] }\n when ss.skip(/\\n+/) then\n # do nothing\n when text = ss.scan(/\\s*(\\#.*)/) then\n action { [:comment, text] }\n when (state == :option) && (ss.skip(/\\s+/)) then\n # do nothing\n when (state == :option) && (text = ss.scan(/stub/i)) then\n action { [:option, text] }\n when (state == :option) && (text = ss.scan(/debug/i)) then\n action { [:option, text] }\n when (state == :option) && (text = ss.scan(/do_parse/i)) then\n action { [:option, text] }\n when (state == :option) && (text = ss.scan(/lineno/i)) then\n action { [:option, text] }\n when (state == :option) && (text = ss.scan(/column/i)) then\n action { [:option, text] }\n when (state == :inner) && (text = ss.scan(/.*/)) then\n action { [:inner, text] }\n when (state == :start) && (text = ss.scan(/.*/)) then\n action { [:start, text] }\n when (state == :macro) && (ss.skip(/\\s+(\\w+)\\s+#{RE}/o)) then\n action { [:macro, *matches] }\n when (state == :rule) && (ss.skip(/\\s*#{ST}?[\\ \\t]*#{RE}[\\ \\t]*#{ACT}?/o)) then\n action { [:rule, *matches] }\n when (state == :rule) && (ss.skip(/\\s*:[\\ \\t]*#{RE}/o)) then\n action { [:grouphead, *matches] }\n when (state == :group) && (ss.skip(/\\s*:[\\ \\t]*#{RE}/o)) then\n action { [:grouphead, *matches] }\n when (state == :group) && (ss.skip(/\\s*\\|\\s*#{ST}?[\\ \\t]*#{RE}[\\ \\t]*#{ACT}?/o)) then\n action { [:group, *matches] }\n when (state == :group) && (ss.skip(/\\s*#{ST}?[\\ \\t]*#{RE}[\\ \\t]*#{ACT}?/o)) then\n action { [:groupend, *matches] }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n when :END then\n case\n when ss.skip(/\\n+/) then\n # do nothing\n when text = ss.scan(/.*/) then\n action { [:end, text] }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n else\n raise ScanError, \"undefined state at #{location}: '#{state}'\"\n end # token = case state\n\n next unless token # allow functions to trigger redo w/ nil\n end # while\n\n raise LexerError, \"bad lexical result at #{location}: #{token.inspect}\" unless\n token.nil? || (Array === token && token.size >= 2)\n\n # auto-switch state\n self.state = token.last if token && token.first == :state\n\n token\n end",
"def next_token\n\t\t@token = @input.next_token\n\tend",
"def next_token; end",
"def next_token\n\n token = nil\n\n until ss.eos? or token do\n token =\n case state\n when nil then\n case\n when text = ss.scan(/#{DIGIT}/) then\n action { [:DIGIT, text.to_i] }\n when text = ss.scan(/#{ADDITION}/) then\n action { [:ADDITION, text] }\n when text = ss.scan(/#{SUBSTRACTION}/) then\n action { [:SUBSTRACTION, text] }\n when text = ss.scan(/#{MULTIPLICATION}/) then\n action { [:MULTIPLICATION, text] }\n when text = ss.scan(/#{DIVISION}/) then\n action { [:DIVISION, text] }\n when text = ss.scan(/#{OPENING_PARANTHESIS}/) then\n action { [:OPENING_PARANTHESIS, text] }\n when text = ss.scan(/#{CLOSING_PARANTHESIS}/) then\n action { [:CLOSING_PARANTHESIS, text] }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n else\n raise ScanError, \"undefined state at #{location}: '#{state}'\"\n end # token = case state\n\n next unless token # allow functions to trigger redo w/ nil\n end # while\n\n raise LexerError, \"bad lexical result at #{location}: #{token.inspect}\" unless\n token.nil? || (Array === token && token.size >= 2)\n\n # auto-switch state\n self.state = token.last if token && token.first == :state\n\n token\n end",
"def next_token\n\n if @ss.bol?\n @line+=1\n @old_pos=@ss.pos\n end\n\n position=[@line,@ss.pos-@old_pos+1]\n\n return :eos if @ss.eos?\n\n case\n when text = @ss.scan(NEWLINE)\n next_token()\n when text = @ss.scan(SPACE)\n next_token()\n when text = @ss.scan(COMMENT)\n next_token()\n when text = @ss.scan(ARROW)\n return Token.new [:arrow,text,position]\n when text = @ss.scan(LT)\n return Token.new [:lt,text,position]\n when text = @ss.scan(LBRACK)\n return Token.new [:lbrack,text,position]\n when text = @ss.scan(RBRACK)\n return Token.new [:rbrack,text,position]\n when text = @ss.scan(IDENTIFIER)\n case\n when value = text.match(IDENT)\n return Token.new [:IDENT,text,position]\n when value = text.match(FLOAT)\n return Token.new [:FLOAT,text,position]\n when value = text.match(INT)\n return Token.new [:INT,text,position]\n when value = text.match(STRING)\n return Token.new [:STRING,text,position]\n when value = text.match(MODULE)\n return Token.new [:module,text,position]\n when value = text.match(CLASS)\n return Token.new [:class,text,position]\n when value = text.match(END_)\n return Token.new [:end,text,position]\n when value = text.match(ATTR)\n return Token.new [:attr,text,position]\n when value = text.match(LPAREN)\n return Token.new [:lparen,text,position]\n when value = text.match(RPAREN)\n return Token.new [:rparen,text,position]\n else\n return Token.new [:identifier,text,position]\n end\n else\n x = @ss.getch\n return Token.new [x, x,position]\n end\n end",
"def next_token\n return process_string if lex_strterm\n self.cmd_state = self.command_start\n self.command_start = false\n self.space_seen = false # TODO: rename token_seen?\n self.last_state = lex_state\n\n token = nil\n\n until ss.eos? or token do\n token =\n case state\n when nil then\n case\n when ss.skip(/[\\ \\t\\r\\f\\v]/) then\n action { self.space_seen = true; next }\n when text = ss.scan(/\\n|\\#/) then\n process_newline_or_comment text\n when text = ss.scan(/[\\]\\)\\}]/) then\n process_brace_close text\n when ss.match?(/\\!/) then\n case\n when is_after_operator? && (ss.skip(/\\!\\@/)) then\n action { result EXPR_ARG, :tUBANG, \"!@\" }\n when text = ss.scan(/\\![=~]?/) then\n action { result :arg_state, TOKENS[text], text }\n end # group /\\!/\n when ss.match?(/\\./) then\n case\n when text = ss.scan(/\\.\\.\\.?/) then\n action { result EXPR_BEG, TOKENS[text], text }\n when ss.skip(/\\.\\d/) then\n action { rb_compile_error \"no .<digit> floating literal anymore put 0 before dot\" }\n when ss.skip(/\\./) then\n action { self.lex_state = EXPR_BEG; result EXPR_DOT, :tDOT, \".\" }\n end # group /\\./\n when text = ss.scan(/\\(/) then\n process_paren text\n when text = ss.scan(/\\,/) then\n action { result EXPR_PAR, TOKENS[text], text }\n when ss.match?(/=/) then\n case\n when text = ss.scan(/\\=\\=\\=|\\=\\=|\\=~|\\=>|\\=(?!begin\\b)/) then\n action { result arg_state, TOKENS[text], text }\n when bol? && (text = ss.scan(/\\=begin(?=\\s)/)) then\n process_begin text\n when text = ss.scan(/\\=(?=begin\\b)/) then\n action { result arg_state, TOKENS[text], text }\n end # group /=/\n when ruby22_label? && (text = ss.scan(/\\\"#{SIMPLE_STRING}\\\":/o)) then\n process_label text\n when text = ss.scan(/\\\"(#{SIMPLE_STRING})\\\"/o) then\n action { result EXPR_END, :tSTRING, text[1..-2].gsub(ESC) { unescape $1 } }\n when text = ss.scan(/\\\"/) then\n action { string STR_DQUOTE; result nil, :tSTRING_BEG, text }\n when text = ss.scan(/\\@\\@?\\d/) then\n action { rb_compile_error \"`#{text}` is not allowed as a variable name\" }\n when text = ss.scan(/\\@\\@?#{IDENT_CHAR}+/o) then\n process_ivar text\n when ss.match?(/:/) then\n case\n when not_end? && (text = ss.scan(/:([a-zA-Z_]#{IDENT_CHAR}*(?:[?]|[!](?!=)|=(?==>)|=(?![=>]))?)/o)) then\n process_symbol text\n when not_end? && (text = ss.scan(/\\:\\\"(#{SIMPLE_STRING})\\\"/o)) then\n process_symbol text\n when not_end? 
&& (text = ss.scan(/\\:\\'(#{SSTRING})\\'/o)) then\n process_symbol text\n when text = ss.scan(/\\:\\:/) then\n process_colon2 text\n when text = ss.scan(/\\:/) then\n process_colon1 text\n end # group /:/\n when ss.skip(/->/) then\n action { result EXPR_ENDFN, :tLAMBDA, nil }\n when text = ss.scan(/[+-]/) then\n process_plus_minus text\n when ss.match?(/[+\\d]/) then\n case\n when ss.skip(/#{NUM_BAD}/o) then\n action { rb_compile_error \"Invalid numeric format\" }\n when ss.skip(/#{INT_DEC}/o) then\n action { int_with_base 10 }\n when ss.skip(/#{INT_HEX}/o) then\n action { int_with_base 16 }\n when ss.skip(/#{INT_BIN}/o) then\n action { int_with_base 2 }\n when ss.skip(/#{INT_OCT_BAD}/o) then\n action { rb_compile_error \"Illegal octal digit.\" }\n when ss.skip(/#{INT_OCT}/o) then\n action { int_with_base 8 }\n when ss.skip(/#{FLOAT_BAD}/o) then\n action { rb_compile_error \"Trailing '_' in number.\" }\n when text = ss.scan(/#{FLOAT}/o) then\n process_float text\n when ss.skip(/#{INT_DEC2}/o) then\n action { int_with_base 10 }\n when ss.skip(/[0-9]/) then\n action { rb_compile_error \"Bad number format\" }\n end # group /[+\\d]/\n when text = ss.scan(/\\[/) then\n process_square_bracket text\n when was_label? && (text = ss.scan(/\\'#{SSTRING}\\':?/o)) then\n process_label_or_string text\n when ss.match?(/\\|/) then\n case\n when ss.skip(/\\|\\|\\=/) then\n action { result EXPR_BEG, :tOP_ASGN, \"||\" }\n when ss.skip(/\\|\\|/) then\n action { result EXPR_BEG, :tOROP, \"||\" }\n when ss.skip(/\\|\\=/) then\n action { result EXPR_BEG, :tOP_ASGN, \"|\" }\n when ss.skip(/\\|/) then\n action { state = is_after_operator? ? EXPR_ARG : EXPR_PAR; result state, :tPIPE, \"|\" }\n end # group /\\|/\n when text = ss.scan(/\\{/) then\n process_brace_open text\n when ss.match?(/\\*/) then\n case\n when ss.skip(/\\*\\*=/) then\n action { result EXPR_BEG, :tOP_ASGN, \"**\" }\n when ss.skip(/\\*\\*/) then\n action { result(:arg_state, space_vs_beginning(:tDSTAR, :tDSTAR, :tPOW), \"**\") }\n when ss.skip(/\\*\\=/) then\n action { result(EXPR_BEG, :tOP_ASGN, \"*\") }\n when ss.skip(/\\*/) then\n action { result(:arg_state, space_vs_beginning(:tSTAR, :tSTAR, :tSTAR2), \"*\") }\n end # group /\\*/\n when ss.match?(/</) then\n case\n when ss.skip(/\\<\\=\\>/) then\n action { result :arg_state, :tCMP, \"<=>\" }\n when ss.skip(/\\<\\=/) then\n action { result :arg_state, :tLEQ, \"<=\" }\n when ss.skip(/\\<\\<\\=/) then\n action { result EXPR_BEG, :tOP_ASGN, \"<<\" }\n when text = ss.scan(/\\<\\</) then\n process_lchevron text\n when ss.skip(/\\</) then\n action { result :arg_state, :tLT, \"<\" }\n end # group /</\n when ss.match?(/>/) then\n case\n when ss.skip(/\\>\\=/) then\n action { result :arg_state, :tGEQ, \">=\" }\n when ss.skip(/\\>\\>=/) then\n action { result EXPR_BEG, :tOP_ASGN, \">>\" }\n when ss.skip(/\\>\\>/) then\n action { result :arg_state, :tRSHFT, \">>\" }\n when ss.skip(/\\>/) then\n action { result :arg_state, :tGT, \">\" }\n end # group />/\n when ss.match?(/\\`/) then\n case\n when expr_fname? && (ss.skip(/\\`/)) then\n action { result(EXPR_END, :tBACK_REF2, \"`\") }\n when expr_dot? && (ss.skip(/\\`/)) then\n action { result((cmd_state ? 
EXPR_CMDARG : EXPR_ARG), :tBACK_REF2, \"`\") }\n when ss.skip(/\\`/) then\n action { string STR_XQUOTE, '`'; result(nil, :tXSTRING_BEG, \"`\") }\n end # group /\\`/\n when text = ss.scan(/\\?/) then\n process_questionmark text\n when ss.match?(/&/) then\n case\n when ss.skip(/\\&\\&\\=/) then\n action { result(EXPR_BEG, :tOP_ASGN, \"&&\") }\n when ss.skip(/\\&\\&/) then\n action { result(EXPR_BEG, :tANDOP, \"&&\") }\n when ss.skip(/\\&\\=/) then\n action { result(EXPR_BEG, :tOP_ASGN, \"&\" ) }\n when ss.skip(/\\&\\./) then\n action { result(EXPR_DOT, :tLONELY, \"&.\") }\n when text = ss.scan(/\\&/) then\n process_amper text\n end # group /&/\n when text = ss.scan(/\\//) then\n process_slash text\n when ss.match?(/\\^/) then\n case\n when ss.skip(/\\^=/) then\n action { result(EXPR_BEG, :tOP_ASGN, \"^\") }\n when ss.skip(/\\^/) then\n action { result(:arg_state, :tCARET, \"^\") }\n end # group /\\^/\n when ss.skip(/\\;/) then\n action { self.command_start = true; result(EXPR_BEG, :tSEMI, \";\") }\n when ss.match?(/~/) then\n case\n when is_after_operator? && (ss.skip(/\\~@/)) then\n action { result(:arg_state, :tTILDE, \"~\") }\n when ss.skip(/\\~/) then\n action { result(:arg_state, :tTILDE, \"~\") }\n end # group /~/\n when ss.match?(/\\\\/) then\n case\n when ss.skip(/\\\\\\r?\\n/) then\n action { self.lineno += 1; self.space_seen = true; next }\n when ss.skip(/\\\\/) then\n action { rb_compile_error \"bare backslash only allowed before newline\" }\n end # group /\\\\/\n when text = ss.scan(/\\%/) then\n process_percent text\n when ss.match?(/\\$/) then\n case\n when text = ss.scan(/\\$_\\w+/) then\n process_gvar text\n when text = ss.scan(/\\$_/) then\n process_gvar text\n when text = ss.scan(/\\$[~*$?!@\\/\\\\;,.=:<>\\\"]|\\$-\\w?/) then\n process_gvar text\n when in_fname? && (text = ss.scan(/\\$([\\&\\`\\'\\+])/)) then\n process_gvar text\n when text = ss.scan(/\\$([\\&\\`\\'\\+])/) then\n process_backref text\n when in_fname? && (text = ss.scan(/\\$([1-9]\\d*)/)) then\n process_gvar text\n when text = ss.scan(/\\$([1-9]\\d*)/) then\n process_nthref text\n when text = ss.scan(/\\$0/) then\n process_gvar text\n when text = ss.scan(/\\$[^[:ascii:]]+/) then\n process_gvar text\n when text = ss.scan(/\\$\\W|\\$\\z/) then\n process_gvar_oddity text\n when text = ss.scan(/\\$\\w+/) then\n process_gvar text\n end # group /\\$/\n when text = ss.scan(/\\_/) then\n process_underscore text\n when text = ss.scan(/#{IDENT}/o) then\n process_token text\n when ss.skip(/\\004|\\032|\\000|\\Z/) then\n action { [RubyLexer::EOF, RubyLexer::EOF] }\n when text = ss.scan(/./) then\n action { rb_compile_error \"Invalid char #{text.inspect} in expression\" }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n else\n raise ScanError, \"undefined state at #{location}: '#{state}'\"\n end # token = case state\n\n next unless token # allow functions to trigger redo w/ nil\n end # while\n\n raise LexerError, \"bad lexical result at #{location}: #{token.inspect}\" unless\n token.nil? || (Array === token && token.size >= 2)\n\n # auto-switch state\n self.state = token.last if token && token.first == :state\n\n token\n end",
"def next_token\n\n token = nil\n\n until ss.eos? or token do\n if ss.peek(1) == \"\\n\"\n self.lineno += 1\n # line starts 1 position after the newline\n self.start_of_current_line_pos = ss.pos + 1\n end\n self.old_pos = ss.pos\n token =\n case state\n when nil then\n case\n when ss.skip(/[ \\t]+/) then\n # do nothing\n when ss.skip(/\\/\\/[^\\r\\n]*/) then\n # do nothing\n when text = ss.scan(/\\r|\\n/) then\n newline text\n when text = ss.scan(/[!=<>]=?/) then\n action { [:SPECIAL, text] }\n when text = ss.scan(/[(){},;.\\-+\\/*]/) then\n action { [:SPECIAL, text] }\n when text = ss.scan(/#{DIGIT}+(\\.#{DIGIT}+)?/) then\n action { [:NUMBER, text] }\n when text = ss.scan(/nil/) then\n action { [:NIL, text] }\n when text = ss.scan(/false/) then\n action { [:FALSE, text] }\n when text = ss.scan(/true/) then\n action { [:TRUE, text] }\n when text = ss.scan(/#{ALPHA}(#{ALPHA}|#{DIGIT})*/) then\n action { [:IDENTIFIER, text] }\n when ss.skip(/\"\"/) then\n action { [:STRING, '\"\"'] }\n when ss.skip(/\"/) then\n [:state, :IN_STRING]\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n when :IN_STRING then\n case\n when text = ss.scan(/[^\"]+/) then\n action { [:STRING, \"\\\"#{text}\\\"\"] }\n when ss.skip(/\"/) then\n [:state, nil]\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n else\n raise ScanError, \"undefined state at #{location}: '#{state}'\"\n end # token = case state\n\n next unless token # allow functions to trigger redo w/ nil\n end # while\n\n raise LexerError, \"bad lexical result at #{location}: #{token.inspect}\" unless\n token.nil? || (Array === token && token.size >= 2)\n\n # auto-switch state\n self.state = token.last if token && token.first == :state\n\n token\n end",
"def next_token\n return if @scanner.eos?\n\n if @scanner.scan(SKIP_PATTERN)\n @column += @scanner[:before].length\n\n new_lines = @scanner[:new_line].delete(\"\\r\")\n unless new_lines.empty?\n @lineno += new_lines.length\n @column = 0\n end\n\n @column += @scanner[:after].length\n end\n\n token =\n case\n when try_match(REFERENCE_PATTERN)\n Token.new :REFERENCE, @scanner[:identifier], @lineno, @column\n when try_match(PATH_PATTERN)\n Token.new :PATH, @scanner[:identifier], @lineno, @column\n when try_match(FILTER_PATTERN) && @scanner.check(OPEN_PAREN_PATTERN)\n Token.new :FILTER, \"?\", @lineno, @column\n when try_match(OPEN_BRACKET_PATTERN)\n @state_stack.push Token.new :OPEN_BRACKET, \"[\", @lineno, @column\n @state_stack.last\n when try_match(OPEN_PAREN_PATTERN)\n @state_stack.push Token.new :OPEN_PAREN, \"(\", @lineno, @column\n @state_stack.last\n when try_match(CLOSE_BRACKET_PATTERN)\n last = @state_stack.pop\n unless last\n raise TokenizeError.unexpected(\"]\", @lineno, @column)\n end\n unless last.type == :OPEN_BRACKET\n raise TokenizeError.unbalanced(\"[\", last.lineno, last.column)\n end\n Token.new :CLOSE_BRACKET, \"]\", @lineno, @column\n when try_match(CLOSE_PAREN_PATTERN)\n last = @state_stack.pop\n unless last\n raise TokenizeError.unexpected(\")\", @lineno, @column)\n end\n unless last.type == :OPEN_PAREN\n raise TokenizeError.unbalanced(\"(\", last.lineno, last.column)\n end\n Token.new :CLOSE_PAREN, \")\", @lineno, @column\n when try_match(SELF_PATTERN)\n Token.new :SELF, \"@\", @lineno, @column\n when try_match(NUMBER_PATTERN)\n Token.new :NUMBER, BigDecimal.new(@last_captured), @lineno, @column\n when try_match(STRING_PATTERN)\n Token.new :STRING, @scanner[:str], @lineno, @column\n when try_match(TRUE_PATTERN)\n Token.new :BOOLEAN, true, @lineno, @column\n when try_match(FALSE_PATTERN)\n Token.new :BOOLEAN, false, @lineno, @column\n when try_match(COLON_PATTERN)\n Token.new :COLON, \":\", @lineno, @column\n when try_match(COMMA_PATTERN)\n Token.new :COMMA, \",\", @lineno, @column\n when try_match(ADD_PATTERN)\n Token.new :ADD, \"+\", @lineno, @column\n when try_match(SUBTRACT_PATTERN)\n case @tokens.last&.type\n when nil, :OPEN_PAREN, :OPEN_BRACKET, :COMMA, :COLON, :POW, :MOD, :ADD, :SUBTRACT, :MULTIPLY, :DIVIDE\n if @scanner.check(NUMBER_PATTERN) ||\n @scanner.check(REFERENCE_PATTERN) ||\n @scanner.check(SUBTRACT_PATTERN) ||\n @scanner.check(OPEN_PAREN_PATTERN)\n Token.new :UMINUS, \"-\", @lineno, @column\n else\n raise TokenizeError.unexpected(\"-\", @lineno, @column)\n end\n else\n Token.new :SUBTRACT, \"-\", @lineno, @column\n end\n when try_match(MULTIPLY_PATTERN)\n Token.new :MULTIPLY, \"*\", @lineno, @column\n when try_match(DIVIDE_PATTERN)\n Token.new :DIVIDE, \"/\", @lineno, @column\n when try_match(POW_PATTERN)\n Token.new :POW, \"^\", @lineno, @column\n when try_match(MOD_PATTERN)\n Token.new :MOD, \"%\", @lineno, @column\n when try_match(EQUAL_TO_PATTERN)\n Token.new :EQUAL_TO, \"==\", @lineno, @column\n when try_match(NOT_EQUAL_TO_PATTERN)\n Token.new :NOT_EQUAL_TO, \"!=\", @lineno, @column\n when try_match(GREATER_THAN_OR_EQUAL_TO_PATTERN)\n Token.new :GREATER_THAN_OR_EQUAL_TO, \">=\", @lineno, @column\n when try_match(GREATER_THAN_PATTERN)\n Token.new :GREATER_THAN, \">\", @lineno, @column\n when try_match(LESS_THAN_OR_EQUAL_TO_PATTERN)\n Token.new :LESS_THAN_OR_EQUAL_TO, \"<=\", @lineno, @column\n when try_match(LESS_THAN_PATTERN)\n Token.new :LESS_THAN, \"<\", @lineno, @column\n when try_match(AND_PATTERN)\n Token.new :AND, \"&&\", @lineno, @column\n 
when try_match(OR_PATTERN)\n Token.new :OR, \"||\", @lineno, @column\n when try_match(NOT_PATTERN)\n Token.new :NOT, \"!\", @lineno, @column\n when try_match(INTERSECT_PATTERN)\n Token.new :INTERSECT, \"&\", @lineno, @column\n when try_match(UNION_PATTERN)\n Token.new :UNION, \"|\", @lineno, @column\n when try_match(IDENTIFIER_PATTERN) && @scanner.check(OPEN_PAREN_PATTERN)\n unless @scanner.check(OPEN_PAREN_PATTERN)\n raise TokenizeError.unexpected(@scanner.peek(7), @lineno, @column)\n end\n Token.new :FUNCTION, @last_captured, @lineno, @column\n else\n raise TokenizeError.unexpected(@scanner.peek(7), @lineno, @column)\n end\n\n @column += @last_captured.length\n @tokens << token\n\n token\n end",
"def next_token\n\n token = nil\n\n until ss.eos? or token do\n token =\n case state\n when nil then\n case\n when ss.skip(/\\s+/) then\n # do nothing\n when ss.skip(/:(#{SYMBOL_NAME})/o) then\n action { emit :tSYMBOL, &:to_sym }\n when ss.skip(/\"(.+?)\"/) then\n action { emit :tSTRING }\n when ss.skip(/[-+]?\\d+\\.\\d+/) then\n action { emit :tNUMBER, &:to_f }\n when ss.skip(/[-+]?\\d+/) then\n action { emit :tNUMBER, &:to_i }\n when ss.skip(/#{Regexp.union(\n %w\"( ) { | } [ ] < > $ ! ^ ` ... + * ? ,\"\n )}/o) then\n action { emit ss.matched, &:to_sym }\n when ss.skip(/#{REGEXP}/o) then\n action { emit_regexp }\n when ss.skip(/%?(#{CONST_NAME})/o) then\n action { emit :tPARAM_CONST }\n when ss.skip(/%([a-z_]+)/) then\n action { emit :tPARAM_NAMED }\n when ss.skip(/%(\\d*)/) then\n action { emit(:tPARAM_NUMBER) { |s| s.empty? ? 1 : s.to_i } } # Map `%` to `%1`\n when ss.skip(/_(#{IDENTIFIER})/o) then\n action { emit :tUNIFY }\n when ss.skip(/_/o) then\n action { emit :tWILDCARD }\n when ss.skip(/\\#(#{CALL})/o) then\n action { @state = :ARG; emit :tFUNCTION_CALL, &:to_sym }\n when ss.skip(/#{IDENTIFIER}\\?/o) then\n action { @state = :ARG; emit :tPREDICATE, &:to_sym }\n when ss.skip(/#{NODE_TYPE}/o) then\n action { emit :tNODE_TYPE, &:to_sym }\n when ss.skip(/\\#.*/) then\n action { emit_comment }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n when :ARG then\n case\n when ss.skip(/\\(/) then\n action { @state = nil; emit :tARG_LIST }\n when ss.skip(//) then\n action { @state = nil }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n else\n raise ScanError, \"undefined state at #{location}: '#{state}'\"\n end # token = case state\n\n next unless token # allow functions to trigger redo w/ nil\n end # while\n\n raise LexerError, \"bad lexical result at #{location}: #{token.inspect}\" unless\n token.nil? || (Array === token && token.size >= 2)\n\n # auto-switch state\n self.state = token.last if token && token.first == :state\n\n token\n end",
"def tokens!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 59)\n\n type = TOKENS\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 596:4: 'tokens' WS_LOOP '{'\n match(\"tokens\")\n ws_loop!\n match(?{)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 59)\n\n end",
"def do_parse\n while token = next_token do\n type, *vals = token\n\n send \"lex_#{type}\", *vals\n end\n end",
"def next_token\n @current_token = @lexer.next_token\n end",
"def next_token\n tokens.shift\n end",
"def getNextToken\n \n #Check if the end has been reached\n if @currentChar == nil\n return\n end\n if @currentChar.match(/\\s/) != nil\n skipWhitespaces\n end\n \n if @currentChar == '%'\n comment\n if @currentChar.match(/\\s/) != nil\n skipWhitespaces\n end\n end \n \n if @currentChar.match(/[A-Za-z0-9_]/) != nil\n return Token.new(NAME, name)\n end\n \n if @currentChar == \"\\\"\"\n return Token.new(STRING, string)\n end\n \n if @currentChar == '{'\n advance\n return Token.new(OPENING_BRACE,'{')\n end\n \n if @currentChar == '}'\n advance\n return Token.new(CLOSING_BRACE,'}')\n end\n \n if @currentChar == '['\n advance\n return Token.new(OPENING_BRACKET,'[')\n end\n \n if @currentChar == ']'\n advance\n return Token.new(CLOSING_BRACKET,']')\n end\n \n if @currentChar == ':'\n advance\n return Token.new(COLON,':')\n end\n \n if @currentChar == '*'\n advance\n return Token.new(ASTERIX,'*')\n end\n \n if @currentChar == '='\n advance\n return Token.new(EQUALS,'=')\n end\n \n if @currentChar == ';'\n advance\n return Token.new(SEMICOLON,';')\n end\n \n if @currentChar == '^'\n advance\n return Token.new(CIRCUMFLEX,'^')\n end\n \n if @currentChar == '+'\n advance\n return Token.new(PLUS,'+')\n end\n if @currentChar == '('\n advance\n return Token.new(OPENING_PARANTHESIS,'(')\n end\n if @currentChar == ')'\n advance\n return Token.new(CLOSING_PARANTHESIS,')')\n end\n if @currentChar == '.'\n advance\n return Token.new(DOT,'.')\n end\n if @currentChar == '#'\n advance\n return Token.new(HASH,'#')\n end\n if @currentChar == ','\n advance\n return Token.new(COMMA,',')\n end\n error\n \n return Token.new(EOF,'EOF') \n \n end",
"def next_token\n \n # Early return if there is nothing to be read. This means we've reached the end of the file.\n \n unless @file[@pos]\n return nil\n end\n \n # This is the token that will be returned.\n token = Compiler::Token.new\n \n # Initializes a new instance of the automaton.\n automaton = Automaton.new\n \n # Will be set inside the loop, if necessary.\n increment_next = false\n \n # Will be set inside the loop. Marks whether we've reached the end of the file.\n eof = false\n \n # Build a new token while we don't have a new word yet and isn't in the failed state\n while ((automaton.state != :A || automaton.word.empty?) && automaton.state != :failed)\n \n # The next input for the automaton\n char = @file[@pos]\n \n if char\n \n # Moves the pointer to the next char\n @pos += 1\n \n automaton.transition(char)\n \n # While the automaton hasn't started to build a new word yet, increments the line and column numbers.\n # In this phase, we're just skipping blank characters\n if automaton.word.empty?\n if increment_next\n if char == \"\\n\"\n increment_next = true\n else\n increment_next = false\n end\n @line += 1\n @column = 0\n elsif char == \"\\n\"\n @column += 1\n increment_next = true\n else\n @column += 1\n end\n end\n \n else\n eof = true\n puts \"breaking\"\n break\n end\n end\n \n \n \n if eof\n automaton.transition(\"\\n\")\n else\n @pos -= 1\n end\n \n if (automaton.type == :identifier) && (Compiler.reserved_words.is_reserved?(automaton.word))\n token.type = :reserved_word\n else\n token.type = automaton.type\n end\n \n token.value = automaton.word\n token.line = @line\n token.column = @column\n \n return token\n \n end",
"def token_ref!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 56)\n\n type = TOKEN_REF\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 581:4: 'A' .. 'Z' ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )*\n match_range(?A, ?Z)\n # at line 581:13: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )*\n loop do #loop 16\n alt_16 = 2\n look_16_0 = @input.peek(1)\n\n if (look_16_0.between?(?0, ?9) || look_16_0.between?(?A, ?Z) || look_16_0 == ?_ || look_16_0.between?(?a, ?z)) \n alt_16 = 1\n\n end\n case alt_16\n when 1\n # at line \n if @input.peek(1).between?(?0, ?9) || @input.peek(1).between?(?A, ?Z) || @input.peek(1) == ?_ || @input.peek(1).between?(?a, ?z)\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n else\n break #loop 16\n end\n end\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 56)\n\n end",
"def next_token()\n raise LexicalError.new(\"No input text was provided.\", LexemePosition::at_start) if token_recognition_state == :waiting_for_input\n\n\t\tif queue.empty?\n unless token_recognition_state == :ready\n error_message = \"#{__method__} may not be called when state is #{token_recognition_state}\"\n raise LexerSetupError.new(error_message)\n end\n\n enqueue_next_tokens() # Retrieve token(s) from the input text & enqueue them\n end\n theToken = queue.dequeue()\n\t\treturn theToken\n end",
"def next()\n if @ss.scan_until(token_re)\n term = @ss.matched\n term_end = @ss.pos\n term_start = term_end - term.size\n else\n return nil\n end\n\n return Token.new(normalize(term), term_start, term_end)\n end",
"def read_next_token(token_class)\n if @next_token\n return @next_token\n else\n # check for a match on the specified class first\n if match(token_class)\n return @next_token\n else\n # now check all the tokens for a match\n Taxonifi::Splitter::Tokens.send(@token_list).each {|t|\n return @next_token if match(t)\n }\n end\n # no match, either end of string or lex-error\n if @input != ''\n raise(Taxonifi::Splitter::SplitterError, \"Lexer Error, unknown token at |#{@input[0..20]}...\", caller)\n else\n return nil\n end\n end\n end",
"def advance\n if @token_queue.any?\n return @token_queue.shift\n end\n\n # Ugly, but dependent on Ragel output. Consider refactoring it somehow.\n _lex_trans_keys = self.class.send :_lex_trans_keys\n _lex_key_spans = self.class.send :_lex_key_spans\n _lex_index_offsets = self.class.send :_lex_index_offsets\n _lex_indicies = self.class.send :_lex_indicies\n _lex_trans_targs = self.class.send :_lex_trans_targs\n _lex_trans_actions = self.class.send :_lex_trans_actions\n _lex_to_state_actions = self.class.send :_lex_to_state_actions\n _lex_from_state_actions = self.class.send :_lex_from_state_actions\n _lex_eof_trans = self.class.send :_lex_eof_trans\n\n p, pe, eof = @p, @source.length + 1, @source.length + 1\n\n @command_state = (@cs == self.class.lex_en_expr_value ||\n @cs == self.class.lex_en_line_begin)\n\n \n# line 10604 \"lib/parser/lexer.rb\"\nbegin\n\ttestEof = false\n\t_slen, _trans, _keys, _inds, _acts, _nacts = nil\n\t_goto_level = 0\n\t_resume = 10\n\t_eof_trans = 15\n\t_again = 20\n\t_test_eof = 30\n\t_out = 40\n\twhile true\n\tif _goto_level <= 0\n\tif p == pe\n\t\t_goto_level = _test_eof\n\t\tnext\n\tend\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _resume\n\tcase _lex_from_state_actions[ @cs] \n\twhen 74 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = p\n\t\tend\n# line 10632 \"lib/parser/lexer.rb\"\n\tend\n\t_keys = @cs << 1\n\t_inds = _lex_index_offsets[ @cs]\n\t_slen = _lex_key_spans[ @cs]\n\t_trans = if ( _slen > 0 && \n\t\t\t_lex_trans_keys[_keys] <= ( (@source_pts[p] || 0)) && \n\t\t\t( (@source_pts[p] || 0)) <= _lex_trans_keys[_keys + 1] \n\t\t ) then\n\t\t\t_lex_indicies[ _inds + ( (@source_pts[p] || 0)) - _lex_trans_keys[_keys] ] \n\t\t else \n\t\t\t_lex_indicies[ _inds + _slen ]\n\t\t end\n\tend\n\tif _goto_level <= _eof_trans\n\t @cs = _lex_trans_targs[_trans]\n\tif _lex_trans_actions[_trans] != 0\n\tcase _lex_trans_actions[_trans]\n\twhen 22 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 52 then\n# line 752 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 23 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n\twhen 63 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n\twhen 66 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 253 then\n# line 1115 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 30 then\n# line 1364 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 32 then\n# line 1380 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 34 then\n# line 1408 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 49 then\n# line 1592 \"lib/parser/lexer.rl\"\n\t\tbegin\n @heredoc_e = p \t\tend\n\twhen 322 then\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 389 then\n# line 1902 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n\twhen 383 then\n# line 1903 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n\twhen 386 then\n# line 1904 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n\twhen 380 then\n# line 1905 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n\twhen 395 then\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n\twhen 363 then\n# line 1907 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n\twhen 375 then\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 370 then\n# line 1965 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 368 then\n# line 1966 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 7 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 90 then\n# line 983 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n literal.flush_string\n literal.extend_content\n\n emit(:tSTRING_DBEG, '#{')\n\n if literal.heredoc?\n literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n literal.start_interp_brace\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 731\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 5 then\n# line 931 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n literal.flush_string\n literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 86 then\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? 
&& !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 85 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 91 then\n# line 931 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.flush_string\n literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 88 then\n# line 918 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 89 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 6 then\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 4 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? 
&& literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 117 then\n# line 983 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n literal.flush_string\n literal.extend_content\n\n emit(:tSTRING_DBEG, '#{')\n\n if literal.heredoc?\n literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n literal.start_interp_brace\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 731\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 10 then\n# line 931 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n literal.flush_string\n literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 114 then\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 113 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 118 then\n# line 931 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.flush_string\n literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 116 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? 
&& literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 11 then\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 9 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 141 then\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? 
&& !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 140 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 143 then\n# line 918 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 144 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 147 then\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 146 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? && literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 149 then\n# line 801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = @source[@ts...@te]\n\n if !literal.heredoc? 
&& literal.nest_and_try_closing(string, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 152 then\n# line 1046 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tREGEXP_OPT, tok(@ts, @te - 1), @ts, @te - 1)\n p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 153 then\n# line 1034 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n unknown_options = tok.scan(/[^imxouesn]/)\n if unknown_options.any?\n diagnostic :error, :regexp_options,\n { :options => unknown_options.join }\n end\n\n emit(:tREGEXP_OPT)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 12 then\n# line 1174 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 154 then\n# line 1174 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 156 then\n# line 1187 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@@[0-9]/\n diagnostic :error, :cvar_name, { :name => tok }\n end\n\n emit(:tCVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 155 then\n# line 1197 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@[0-9]/\n diagnostic :error, :ivar_name, { :name => tok }\n end\n\n emit(:tIVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 177 then\n# line 1218 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(KEYWORDS_BEGIN[tok]);\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 163 then\n# line 1226 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tIDENTIFIER)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 14 then\n# line 1230 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n @cs = 739; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 160 then\n# line 1239 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 172 then\n# line 1243 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 159 then\n# line 1251 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 158 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. 
#advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 176 then\n# line 1218 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(KEYWORDS_BEGIN[tok]);\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 173 then\n# line 1222 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 175 then\n# line 1226 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 170 then\n# line 1230 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n @cs = 739; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 166 then\n# line 1239 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 171 then\n# line 1246 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 164 then\n# line 1248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 169 then\n# line 1251 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 13 then\n# line 1251 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 162 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 25 then\n\tbegin begin p = (( @te))-1; end\n emit(KEYWORDS_BEGIN[tok]);\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 26 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 27 then\n\tbegin begin p = (( @te))-1; end\n emit(:tIDENTIFIER)\n @cs = 422; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 16 then\n# line 1263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tLABEL, tok(@ts, @te - 1))\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 179 then\n# line 1269 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 178 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 181 then\n# line 1266 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 180 then\n# line 1269 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 15 then\n# line 1269 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 187 then\n# line 1295 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 186 then\n# line 1301 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 185 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 197 then\n# line 1280 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 188 then\n# line 1284 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 193 then\n# line 1295 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 191 then\n# line 1298 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 196 then\n# line 1301 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 201 then\n# line 1359 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 204 then\n# line 1368 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok(tm, tm + 1) == '/'\n # Ambiguous regexp literal.\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 210 then\n# line 1392 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 18 then\n# line 1400 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 212 then\n# line 1409 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = 
_again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 33 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 198 then\n# line 1432 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 199 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 211 then\n# line 1359 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 207 then\n# line 1381 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 209 then\n# line 1397 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 203 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 202 then\n# line 1423 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 220 then\n# line 1432 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 19 then\n# line 1423 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 35 then\n# line 1432 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 17 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 51 then\n\tbegin begin p = (( @te))-1; end\n\n if tok(tm, tm + 1) == '/'\n # Ambiguous regexp literal.\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 52 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 57 then\n\tbegin begin p = (( @te))-1; end\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\telse\n\tbegin begin p = (( @te))-1; end\nend\nend \n\t\t\tend\n\twhen 37 then\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 451\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 223 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. 
#advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 224 then\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 451\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 38 then\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 451\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 36 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 64 then\n\tbegin begin p = (( @te))-1; end\n\n if @cond.active?\n emit(:kDO_COND, 'do', @te - 2, @te)\n else\n emit(:kDO, 'do', @te - 2, @te)\n end\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 65 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 451\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 234 then\n# line 1495 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_do(true)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 227 then\n# line 1501 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 228 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 229 then\n# line 1498 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 232 then\n# line 1501 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 238 then\n# line 1525 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 237 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 246 then\n# line 1517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 240 then\n# line 1519 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 244 then\n# line 1525 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 239 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 72 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 73 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 274 then\n# line 1542 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1;\n if tok.start_with? '-'\n emit(:tUMINUS_NUM, '-', @ts, @ts + 1)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 275 then\n# line 1562 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type = delimiter = tok[0].chr\n p = p - 1; \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 269 then\n# line 1569 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok[0].chr, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 45 then\n# line 1576 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 276 then\n# line 1616 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 48 then\n# line 1630 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 289 then\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 291 then\n# line 1658 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[tok[1]]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 290 then\n# line 1668 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 277 then\n# line 1704 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 42 then\n# line 1724 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1;\n\n if version?(18)\n ident = tok(@ts, @te - 2)\n\n emit((tok[0] =~ /[A-Z]/) ? :tCONSTANT : :tIDENTIFIER,\n ident, @ts, @te - 2)\n p = p - 1; # continue as a symbol\n\n if !@static_env.nil? && @static_env.declared?(ident)\n @cs = 739;\n else\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 263 then\n# line 1769 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 156\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 46 then\n# line 1782 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 249 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 273 then\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tSTAR)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 270 then\n# line 1576 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 268 then\n# line 1582 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 278 then\n# line 1630 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 293 then\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 288 then\n# line 1668 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 294 then\n# line 1674 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 271 then\n# line 1704 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 316 then\n# line 1159 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 260 then\n# line 1766 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 262 then\n# line 1769 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 156\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 265 then\n# line 1782 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 44 then\n# line 1582 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 53 then\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 51 then\n# line 1668 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 43 then\n# line 1766 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 47 then\n# line 1782 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 41 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 95 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN, @ts, tm)\n p = tm - 1\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 96 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 98 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 99 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 102 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 56 then\n# line 1792 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 325 then\n# line 1801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 324 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 327 then\n# line 1795 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 326 then\n# line 1801 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 55 then\n# line 1801 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 519\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 357 then\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION, @ts, @ts + 2)\n\n @lambda_stack.push @paren_nest\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 70 then\n# line 1849 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:kCLASS, 'class', @ts, @ts + 5)\n emit(:tLSHFT, '<<', @te - 2, @te)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 335 then\n# line 1986 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 58 then\n# line 2004 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 399 then\n# line 2011 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 429; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 349 then\n# line 2038 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 341 then\n# line 2042 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION)\n @cond.lexpop; @cmdarg.lexpop\n\n if %w\"} ]\".include?(tok)\n @cs = 487;\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 354 then\n# line 2056 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tOP_ASGN, tok(@ts, @te - 1))\n @cs = 519; 
\tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 339 then\n# line 2060 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 336 then\n# line 2068 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 338 then\n# line 2081 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 404 then\n# line 2084 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :error, :bare_backslash, nil, range(@ts, @ts + 1)\n p = p - 1;\n end\n\t\tend\n\twhen 334 then\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 333 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 414 then\n# line 1845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 310; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 412 then\n# line 1849 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kCLASS, 'class', @ts, @ts + 5)\n emit(:tLSHFT, '<<', @te - 2, @te)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 411 then\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 360 then\n# line 1936 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 401 then\n# line 1996 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 353 then\n# line 2004 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 305\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 358 then\n# line 2011 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 429; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 406 then\n# line 1159 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 348 then\n# line 2038 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 359 then\n# line 2068 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 346 then\n# line 2075 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 352 then\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 59 then\n# line 1936 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 57 then\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 60 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 110 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'\n emit(:tLAMBEG)\n else # 'do'\n emit(:kDO_LAMBDA)\n end\n else\n if tok == '{'\n emit_table(PUNCTUATION)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 111 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 310; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 112 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class', @ts, @ts + 5)\n emit(:tLSHFT, '<<', @te - 2, @te)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 113 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 114 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 115 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 116 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 117 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 739;\n else\n @cs = (arg_or_cmdarg);\n end\n else\n emit_table(KEYWORDS)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 118 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 119 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 121 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 123 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 125 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 129 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 130 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\nend \n\t\t\tend\n\twhen 72 then\n# line 2102 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1;\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 419 then\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 156; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 420 then\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 156; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 71 then\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 156; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 423 then\n# line 2116 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = 156\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 422 then\n# line 2124 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :embedded_document, nil,\n range(@eq_begin_s, @eq_begin_s + '=begin'.length)\n end\n\t\tend\n\twhen 83 then\n# line 2134 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 916\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 2 then\n# line 2138 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 75 then\n# line 2141 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 731\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 76 then\n# line 486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 77 then\n# line 2131 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 82 then\n# line 2134 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 916\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 81 then\n# line 2141 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 731\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 1 then\n# line 2141 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 731\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 69 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 87 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 115 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 142 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. 
Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 148 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n\twhen 213 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1409 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 205 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 292 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1658 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[tok[1]]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 264 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1769 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 156\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 424 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2116 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = 156\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 421 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 84 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2134 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 916\n\t\t_goto_level = _again\n\t\tnext\n\tend\n 
end\n\t\tend\n\twhen 3 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2138 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 377 then\n# line 594 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tRATIONAL, Rational(value)) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 376 then\n# line 595 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tIMAGINARY, Complex(0, value)) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 378 then\n# line 596 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tIMAGINARY, Complex(0, Rational(value))) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 371 then\n# line 600 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |digits| emit(:tIMAGINARY, Complex(0, Float(digits))) } \t\tend\n# line 1968 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 372 then\n# line 604 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |digits| emit(:tRATIONAL, Rational(digits)) } \t\tend\n# line 1968 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 373 then\n# line 605 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |digits| emit(:tIMAGINARY, Complex(0, Rational(digits))) } \t\tend\n# line 1968 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 109 then\n# line 620 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n @escape = lambda do\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n end\n\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. 
The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 136 then\n# line 620 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n @escape = lambda do\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n end\n\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 311 then\n# line 620 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n @escape = lambda do\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n end\n\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 93 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 120 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? 
:call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 295 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 96 then\n# line 648 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 123 then\n# line 648 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? 
&& escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 298 then\n# line 648 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 98 then\n# line 669 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 125 then\n# line 669 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 300 then\n# line 669 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 95 then\n# line 676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. 
So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 122 then\n# line 676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 297 then\n# line 676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 112 then\n# line 680 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 139 then\n# line 680 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? 
:call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 314 then\n# line 680 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 106 then\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 133 then\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? 
&& escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 308 then\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 111 then\n# line 688 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_hex_escape, nil,\n range(@escape_s - 1, p + 2)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 138 then\n# line 688 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_hex_escape, nil,\n range(@escape_s - 1, p + 2)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 313 then\n# line 688 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_hex_escape, nil,\n range(@escape_s - 1, p + 2)\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 105 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? 
&& escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 132 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 307 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 108 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :unterminated_unicode, nil,\n range(p - 1, p)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 135 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :unterminated_unicode, nil,\n range(p - 1, p)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. 
As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 310 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :unterminated_unicode, nil,\n range(p - 1, p)\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 94 then\n# line 746 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 121 then\n# line 746 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 296 then\n# line 746 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 145 then\n# line 752 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 150 then\n# line 752 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 62 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 24 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1364 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 26 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1380 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 28 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1408 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 165 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # 
position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 184 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1266 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 192 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1298 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 27 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 222 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1423 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 216 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1429 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 233 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1498 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 245 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1519 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 241 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1522 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 261 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1766 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 331 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if 
@herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1795 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 328 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1798 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 156\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 405 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2075 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 347 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2078 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 913\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 78 then\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2131 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 215 then\n# line 955 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n if literal\n literal.start_interp_brace\n end\n \t\tend\n# line 1342 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n emit(:tLCURLY, '{', @te - 1, @te)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 235 then\n# line 955 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n if literal\n literal.start_interp_brace\n end\n \t\tend\n# line 1491 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACE_ARG)\n @cs = 731; end\n\t\tend\n\twhen 323 then\n# line 955 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n if literal\n literal.start_interp_brace\n end\n \t\tend\n# line 1685 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG)\n else\n emit_table(PUNCTUATION_BEGIN)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 417 then\n# line 955 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n if literal\n literal.start_interp_brace\n end\n \t\tend\n# line 1820 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'\n emit(:tLAMBEG)\n else # 'do'\n emit(:kDO_LAMBDA)\n end\n else\n if tok == '{'\n emit_table(PUNCTUATION)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 418 then\n# line 963 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n if literal\n if literal.end_interp_brace_and_try_closing\n if version?(18, 19)\n emit(:tRCURLY, 
'}', p - 1, p)\n else\n emit(:tSTRING_DEND, '}', p - 1, p)\n end\n\n if literal.saved_herebody_s\n @herebody_s = literal.saved_herebody_s\n end\n\n p = p - 1;\n @cs = (stack_pop);\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n \t\tend\n# line 2042 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n @cond.lexpop; @cmdarg.lexpop\n\n if %w\"} ]\".include?(tok)\n @cs = 487;\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 64 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 67 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 168 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 183 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1266 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 195 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1298 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 218 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1426 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 231 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1498 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 243 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1519 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 267 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1766 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 330 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1795 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 351 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2075 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 80 then\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2131 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 189 then\n# line 1115 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1288 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 279 then\n# line 1115 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 252 then\n# line 1115 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 343 then\n# line 1115 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2018 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 345 then\n# line 1115 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 110 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'\n emit(:tLAMBEG)\n else # 'do'\n emit(:kDO_LAMBDA)\n end\n else\n if tok == '{'\n emit_table(PUNCTUATION)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 111 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 310; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 112 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class', @ts, @ts + 5)\n emit(:tLSHFT, '<<', @te - 2, @te)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 113 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 114 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 115 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 116 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 117 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 739;\n else\n @cs = (arg_or_cmdarg);\n end\n else\n emit_table(KEYWORDS)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 118 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 119 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? 
'_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 121 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 123 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 125 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 129 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 130 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\nend \n\t\t\tend\n\twhen 190 then\n# line 1116 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1288 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 280 then\n# line 1116 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 254 then\n# line 1116 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 344 then\n# line 1116 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2018 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 451; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 281 then\n# line 1121 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 255 then\n# line 1121 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 282 then\n# line 1122 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 256 then\n# line 1122 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 286 then\n# line 1123 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 259 then\n# line 1123 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 285 then\n# line 1124 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 258 then\n# line 1124 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 95 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN, @ts, tm)\n p = tm - 1\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 96 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 98 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 99 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 102 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 283 then\n# line 1125 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 257 then\n# line 1125 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 284 then\n# line 1130 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1622 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 400 then\n# line 1135 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2000 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT, tok(@ts, tm), @ts, tm)\n p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 214 then\n# line 1141 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 1336 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '[', @te - 1, @te)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 315 then\n# line 1141 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 1699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 403 then\n# line 1141 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 2064 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 206 then\n# line 1148 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1317 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if version?(18)\n emit(:tLPAREN2, '(', @te - 1, @te)\n @cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tLPAREN_ARG, '(', @te - 1, @te)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 219 then\n# line 1148 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1330 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN2)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 225 then\n# line 1148 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1445 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tLPAREN_ARG, '(', @te - 1, @te)\n if version?(18)\n 
@cs = 731; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 272 then\n# line 1148 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 355 then\n# line 1148 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 2038 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 356 then\n# line 1154 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2042 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n @cond.lexpop; @cmdarg.lexpop\n\n if %w\"} ]\".include?(tok)\n @cs = 487;\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 50 then\n# line 1592 \"lib/parser/lexer.rl\"\n\t\tbegin\n @heredoc_e = p \t\tend\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 287 then\n# line 1593 \"lib/parser/lexer.rl\"\n\t\tbegin\n new_herebody_s = p \t\tend\n# line 1594 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n tok(@ts, @heredoc_e) =~ /^<<(-?)([\"'`]?)(.*)\\2$/\n\n indent = !$1.empty?\n type = '<<' + ($2.empty? ? 
'\"' : $2)\n delimiter = $3\n\n @cs = (push_literal(type, delimiter, @ts, @heredoc_e, indent));\n\n if @herebody_s.nil?\n @herebody_s = new_herebody_s\n end\n\n p = @herebody_s - 1\n end\n\t\tend\n\twhen 319 then\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1710 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS_BEGIN, @ts, tm)\n p = tm - 1\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 390 then\n# line 1902 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 384 then\n# line 1903 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 387 then\n# line 1904 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 381 then\n# line 1905 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 396 then\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 365 then\n# line 1907 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 397 then\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n\twhen 8 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 174 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1218 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 25;\t\tend\n\twhen 161 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1222 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 26;\t\tend\n\twhen 157 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1226 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 27;\t\tend\n\twhen 20 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1368 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 51;\t\tend\n\twhen 208 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1381 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 52;\t\tend\n\twhen 21 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 57;\t\tend\n\twhen 200 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1423 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 58;\t\tend\n\twhen 226 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1455 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 64;\t\tend\n\twhen 39 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 65;\t\tend\n\twhen 247 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1513 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 72;\t\tend\n\twhen 236 then\n# line 1 
\"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 250 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1704 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 94;\t\tend\n\twhen 318 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1710 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 95;\t\tend\n\twhen 317 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1716 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 96;\t\tend\n\twhen 54 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 98;\t\tend\n\twhen 248 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1159 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 99;\t\tend\n\twhen 251 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1782 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 102;\t\tend\n\twhen 413 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1820 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 110;\t\tend\n\twhen 408 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 111;\t\tend\n\twhen 416 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1855 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 113;\t\tend\n\twhen 409 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 114;\t\tend\n\twhen 410 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1864 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 115;\t\tend\n\twhen 415 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1868 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 116;\t\tend\n\twhen 407 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1879 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 117;\t\tend\n\twhen 402 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1895 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 118;\t\tend\n\twhen 337 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 119;\t\tend\n\twhen 367 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1953 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 122;\t\tend\n\twhen 61 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1968 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 123;\t\tend\n\twhen 340 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1996 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 125;\t\tend\n\twhen 332 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1159 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 129;\t\tend\n\twhen 342 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2018 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 130;\t\tend\n\twhen 151 then\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(literal.str_s, literal.str_s + 1)\n end\n\n if literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, '')\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, '')\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n12\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if literal.words? && !eof_codepoint?(@source_pts[p])\n literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n literal.extend_string tok, @ts, @te\n literal.flush_string\n end\n end\n\t\tend\n# line 752 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 99 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 126 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 301 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 104 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 131 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 306 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 103 then\n# line 664 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 130 then\n# line 664 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. 
As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 305 then\n# line 664 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 101 then\n# line 669 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 128 then\n# line 669 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 303 then\n# line 669 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 97 then\n# line 670 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. 
Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 124 then\n# line 670 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 299 then\n# line 670 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 110 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 620 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n @escape = lambda do\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n end\n\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 137 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 620 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n @escape = lambda do\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n end\n\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 312 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 620 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n @escape = lambda do\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n end\n\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 107 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :unterminated_unicode, nil,\n range(p - 1, p)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 134 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :unterminated_unicode, nil,\n range(p - 1, p)\n end\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. 
So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 309 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s - 1, p)\n end\n \t\tend\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = lambda do\n diagnostic :fatal, :unterminated_unicode, nil,\n range(p - 1, p)\n end\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 92 then\n# line 752 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 746 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 119 then\n# line 752 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 746 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 65 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 167 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 182 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1266 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 194 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 1298 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 217 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1426 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 739\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 230 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1498 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 242 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1519 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 266 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1766 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 329 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1795 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 350 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2075 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 79 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2131 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 392 then\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n\twhen 362 then\n# line 1907 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n\twhen 374 then\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 369 then\n# line 1965 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 599 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |digits| emit(:tFLOAT, Float(digits)) } \t\tend\n# line 1968 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 366 then\n# line 1966 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 599 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |digits| emit(:tFLOAT, Float(digits)) } \t\tend\n# line 1968 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 221 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 460 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1423 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 58;\t\tend\n\twhen 29 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 57;\t\tend\n\twhen 40 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 65;\t\tend\n\twhen 68 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1074 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 1849 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 112;\t\tend\n\twhen 31 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1380 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1381 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 52;\t\tend\n\twhen 321 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1754 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 98;\t\tend\n\twhen 320 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1159 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 99;\t\tend\n\twhen 393 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 119;\t\tend\n\twhen 102 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 129 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 304 then\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n char = @source[p - 1].chr\n @escape = ESCAPES.fetch(char, char)\n \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? 
:call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 100 then\n# line 670 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 127 then\n# line 670 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if literal.nest_and_try_closing('\\\\', @ts, @ts + 1)\n # If the literal is actually closed by the backslash,\n # rewind the input prior to consuming the escape sequence.\n p = @escape_s - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Get the first character after the backslash.\n escaped_char = @source[@escape_s].chr\n\n if literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if literal.regexp? && escaped_char == '\\\\'\n # Regular expressions should include backslashes in their escaped\n # form.\n literal.extend_string(tok, @ts, @te)\n else\n literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n # Two things to consider here.\n #\n # 1. 
The `escape' rule should be pure and so won't raise any\n # errors by itself. Instead, it stores them in lambdas.\n #\n # 2. Non-interpolated literals do not go through the aforementioned\n # rule. As \\\\ and \\' (and variants) are munged, the full token\n # should always be written for such literals.\n\n @escape.call if @escape.respond_to? :call\n\n if literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n literal.extend_string(tok.gsub(\"\\\\\\n\", ''), @ts, @te)\n else\n literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n end\n\t\tend\n\twhen 302 then\n# line 670 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source[p - 1].chr \t\tend\n# line 654 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 658 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Show an error if memorized.\n @escape.call if @escape.respond_to? :call\n\n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value[0].ord)\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 739; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 388 then\n# line 1902 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 382 then\n# line 1903 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 385 then\n# line 1904 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 379 then\n# line 1905 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 391 then\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 361 then\n# line 1907 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1909 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'\n diagnostic :error, :trailing_in_number, { :character => '_' },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = \"0\"\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base))\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 25 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 792 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1380 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1381 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 52;\t\tend\n\twhen 398 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1941 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 121;\t\tend\n\twhen 394 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1941 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 121;\t\tend\n\twhen 364 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1907 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 1908 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 593 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |value| emit(:tINTEGER, value) } \t\tend\n# line 1941 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 121;\t\tend\n# line 20641 \"lib/parser/lexer.rb\"\n\tend\n\tend\n\tend\n\tif _goto_level <= _again\n\tcase _lex_to_state_actions[ @cs] \n\twhen 73 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = nil;\t\tend\n# line 20651 \"lib/parser/lexer.rb\"\n\tend\n\n\tif @cs == 0\n\t\t_goto_level = 
_out\n\t\tnext\n\tend\n\tp += 1\n\tif p != pe\n\t\t_goto_level = _resume\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _test_eof\n\tif p == eof\n\tif _lex_eof_trans[ @cs] > 0\n\t\t_trans = _lex_eof_trans[ @cs] - 1;\n\t\t_goto_level = _eof_trans\n\t\tnext;\n\tend\n\tend\n\n\tend\n\tif _goto_level <= _out\n\t\tbreak\n\tend\nend\n\tend\n\n# line 261 \"lib/parser/lexer.rl\"\n # %\n\n @p = p\n\n if @token_queue.any?\n @token_queue.shift\n elsif @cs == self.class.lex_error\n [ false, [ '$error', range(p - 1, p) ] ]\n else\n [ false, [ '$eof', range(p, p) ] ]\n end\n end",
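Note: the generated numeric-literal actions in the entry above (the @num_base / @num_digits_s / @num_suffix_s blocks) all funnel into the same check: reject a trailing underscore, reject an empty digit run, reject 8/9 in an octal literal, then emit tINTEGER in the given base. A minimal stand-alone sketch of that check, with error reporting simplified to plain exceptions (the real lexer emits ranged diagnostics instead), assuming nothing beyond what the generated code shows:

    # Simplified mirror of the integer-literal validation in the actions above.
    def lex_integer(digits, base)
      raise ArgumentError, "trailing `_' in number" if digits.end_with?('_')
      raise ArgumentError, 'numeric literal without digits' if digits.empty?
      if base == 8 && (idx = digits.index(/[89]/))
        raise ArgumentError, "invalid octal digit at offset #{idx}"
      end
      [:tINTEGER, digits.to_i(base)]   # String#to_i ignores embedded underscores
    end

    p lex_integer('1f', 16)   # => [:tINTEGER, 31]
    p lex_integer('755', 8)   # => [:tINTEGER, 493]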
"def lex_start; end",
"def lex_start; end",
"def lex_start; end",
"def lex_start; end",
"def next_token\n @tokens.shift\n end",
"def tokens_spec\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 2)\n return_value = TokensSpecReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __TOKENS13__ = nil\n char_literal15 = nil\n token_spec14 = nil\n\n tree_for_TOKENS13 = nil\n tree_for_char_literal15 = nil\n stream_TOKENS = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token TOKENS\")\n stream_T__72 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__72\")\n stream_token_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule token_spec\")\n begin\n # at line 110:4: TOKENS ( token_spec )+ '}'\n __TOKENS13__ = match(TOKENS, TOKENS_FOLLOWING_TOKENS_IN_tokens_spec_467) \n if @state.backtracking == 0\n stream_TOKENS.add(__TOKENS13__)\n end\n # at file 110:11: ( token_spec )+\n match_count_8 = 0\n loop do\n alt_8 = 2\n look_8_0 = @input.peek(1)\n\n if (look_8_0 == TOKEN_REF) \n alt_8 = 1\n\n end\n case alt_8\n when 1\n # at line 110:11: token_spec\n @state.following.push(TOKENS_FOLLOWING_token_spec_IN_tokens_spec_469)\n token_spec14 = token_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_token_spec.add(token_spec14.tree)\n end\n\n else\n match_count_8 > 0 and break\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n\n eee = EarlyExit(8)\n\n\n raise eee\n end\n match_count_8 += 1\n end\n\n char_literal15 = match(T__72, TOKENS_FOLLOWING_T__72_IN_tokens_spec_472) \n if @state.backtracking == 0\n stream_T__72.add(char_literal15)\n end\n # AST Rewrite\n # elements: token_spec, TOKENS\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 110:27: -> ^( TOKENS ( token_spec )+ )\n # at line 110:30: ^( TOKENS ( token_spec )+ )\n root_1 = @adaptor.create_flat_list!\n root_1 = @adaptor.become_root(stream_TOKENS.next_node, root_1)\n\n # at line 110:39: ( token_spec )+\n unless stream_token_spec.has_next?\n raise ANTLR3::RewriteEarlyExit\n end\n\n while stream_token_spec.has_next?\n @adaptor.add_child(root_1, stream_token_spec.next_tree)\n\n end\n\n stream_token_spec.reset\n\n @adaptor.add_child(root_0, root_1)\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing(root_0)\n @adaptor.set_token_boundaries(return_value.tree, return_value.start, return_value.stop)\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node!(@input, return_value.start, @input.look(-1), re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 2)\n\n end\n \n return return_value\n end",
"def consume\n @current = @tokens[@pos]\n @pos += 1 if @current\n @current\n end",
"def of\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 43 )\n of_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return \n end\n # at line 330:5: ( 'O' | 'o' ) ( 'F' | 'f' )\n if @input.peek( 1 ).between?( T__18, T__19 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__30, T__31 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 43 )\n memoize( __method__, of_start_index, success ) if @state.backtracking > 0\n\n end\n \n return \n end",
"def tokens_spec\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n return_value = TokensSpecReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __TOKENS13__ = nil\n char_literal15 = nil\n token_spec14 = nil\n\n tree_for_TOKENS13 = nil\n tree_for_char_literal15 = nil\n stream_TOKENS = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token TOKENS\" )\n stream_T__72 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__72\" )\n stream_token_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule token_spec\" )\n begin\n # at line 101:4: TOKENS ( token_spec )+ '}'\n __TOKENS13__ = match( TOKENS, TOKENS_FOLLOWING_TOKENS_IN_tokens_spec_462 )\n if @state.backtracking == 0\n stream_TOKENS.add( __TOKENS13__ )\n end\n # at file 101:11: ( token_spec )+\n match_count_8 = 0\n while true\n alt_8 = 2\n look_8_0 = @input.peek( 1 )\n\n if ( look_8_0 == TOKEN_REF )\n alt_8 = 1\n\n end\n case alt_8\n when 1\n # at line 101:11: token_spec\n @state.following.push( TOKENS_FOLLOWING_token_spec_IN_tokens_spec_464 )\n token_spec14 = token_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_token_spec.add( token_spec14.tree )\n end\n\n else\n match_count_8 > 0 and break\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n eee = EarlyExit(8)\n\n\n raise eee\n end\n match_count_8 += 1\n end\n\n char_literal15 = match( T__72, TOKENS_FOLLOWING_T__72_IN_tokens_spec_467 )\n if @state.backtracking == 0\n stream_T__72.add( char_literal15 )\n end\n # AST Rewrite\n # elements: token_spec, TOKENS\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 101:27: -> ^( TOKENS ( token_spec )+ )\n # at line 101:30: ^( TOKENS ( token_spec )+ )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( stream_TOKENS.next_node, root_1 )\n\n # at line 101:39: ( token_spec )+\n stream_token_spec.has_next? or raise ANTLR3::RewriteEarlyExit\n\n while stream_token_spec.has_next?\n @adaptor.add_child( root_1, stream_token_spec.next_tree )\n\n end\n stream_token_spec.reset\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n end\n \n return return_value\n end",
"def next_token\n\t\t@tokens.next_token\n\tend",
"def lexer(input)\n\t# We're gonna run a nice Lexer now\n\t# Get ourselves some tokens\n\t\n\ttokens = [] # Startin' with the input code in a mighty nice array\n\tc_line = 0 # current line in program\n\tspecial_case = false\n\teof_reached = false\n\t\n\tc_string = \"\"\t# the current string of chars\n\tc_pos = 1\t\t# current position in file\n\ts_pos = nil \t# current position of string\n\t\n\t# get a line of input\n\tfor line in input\n\t\t\n\t\t# check characters in line of input\n\t\tfor i in 0...line.length\n\t\t\n\t\t\t# checks for special cases\n\t\t\tif special_case\n\t\t\t\t\n\t\t\t\tlast_token = tokens[tokens.length - 1].type\n\t\t\t\t\n\t\t\t\t# Boolop\n\t\t\t\tif last_token == \"T_BOOLOP\"\n\t\t\t\t\t\n\t\t\t\t\tspecial_case = false\n\t\t\t\t\tnext\n\t\t\t\t\n\t\t\t\t# String time!\n\t\t\t\telsif last_token == \"T_QUOTE\"\n\t\t\t\t\n\t\t\t\t\t# make sure that we're not going to be using nil for tokenize\n\t\t\t\t\tif s_pos == nil\n\t\t\t\t\t\ts_pos = i\n\t\t\t\t\tend\n\t\t\t\n\t\t\t\t\t# check the different options\n\t\t\t\t\tcase line[i]\n\t\t\t\t\n\t\t\t\t\t# found a closing quotation mark\n\t\t\t\t\twhen /\"/\n\t\t\t\t\t\ttokens.push(tokenize(c_string, \"string\", c_line, s_pos))\n\t\t\t\t\t\ttokens.push(tokenize(line[i], \"op\", c_line, i))\n\t\t\t\t\t\tc_string = \"\"\n\t\t\t\t\t\tspecial_case = false\n\t\t\t\t\t\n\t\t\t\t\t# space or letter\n\t\t\t\t\twhen /( )/, $character\n\t\t\t\t\t\tc_string = c_string + line[i]\n\t\t\t\t\t\n\t\t\t\t\t# invalid options\n\t\t\t\t\telse\n\t\t\t\t\t\n\t\t\t\t\t\t# checks for end of line, else it's a bad character\n\t\t\t\t\t\tif i == line.length - 1\n\t\t\t\t\t\t\traise StringDetectionError.new(\"unclosed\", line[i], c_line, i)\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\traise StringDetectionError.new(\"char\", line[i], c_line, i)\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\n\t\t\n\t\t\t# test here for EOF symbol\n\t\t\telsif $eof.match(line[i])\n\t\t\t\t\n\t\t\t\t# tokenize current string\n\t\t\t\tif c_string != \"\"\n\t\t\t\t\ttokens.push(tokenize(c_string, \"character\", c_line, s_pos))\n\t\t\t\t\t\n\t\t\t\t\tc_string = \"\"\n\t\t\t\t\ts_pos = nil\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\teof_reached = true\n\t\t\t\t\n\t\t\t\t# tokenize '$'\n\t\t\t\ttokens.push(tokenize(line[i], \"op\", c_line, i))\n\t\t\t\t\n\t\t\t# Testin' for whitespace\n\t\t\telsif $space.match(line[i])\n\t\t\t\n\t\t\t\tif c_string != \"\"\n\t\t\t\t\ttokens.push(tokenize(c_string, \"character\", c_line, s_pos))\n\t\t\t\t\t\n\t\t\t\t\tc_string = \"\"\n\t\t\t\t\ts_pos = nil\n\t\t\t\tend\n\t\t\t\n\t\t\t# Testin' for operators\n\t\t\t# note: the whitespace issue was handled with the previous elsif\n\t\t\telsif $operator.match(line[i])\n\t\t\t\n\t\t\t\t# tokenize c_string if applicable\n\t\t\t\tif c_string != \"\"\n\t\t\t\t\ttokens.push(tokenize(c_string, \"character\", c_line, s_pos))\n\t\t\t\t\t\n\t\t\t\t\tc_string = \"\"\n\t\t\t\t\ts_pos = nil\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# test for that elusive boolop...\n\t\t\t\t# make sure we don't access a non-existent item...\n\t\t\t\tif i != line.length - 1\n\t\t\t\t\tif /[!|=]/.match(line[i]) and /=/.match(line[i + 1])\n\t\t\t\t\t\t# attempt to tokenize the operator\n\t\t\t\t\t\ttokens.push(tokenize(line[i] + line[i + 1], \"op\", c_line, i))\n\t\t\t\t\t\tspecial_case = true\n\t\t\t\t\telse\n\t\t\t\t\t\ttokens.push(tokenize(line[i], \"op\", c_line, i))\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\ttokens.push(tokenize(line[i], \"op\", c_line, i))\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# if op is \", start the string gathering process\n\t\t\t\tif 
/\"/.match(line[i])\n\t\t\t\t\tspecial_case = true\n\t\t\t\tend\n\t\t\t\t\n\t\t\t# Testin' for alpha numeric characters\n\t\t\telsif $character.match(line[i])\n\t\t\t\n\t\t\t\t# set position of current string\n\t\t\t\tif c_string == \"\" and s_pos == nil\n\t\t\t\t\ts_pos = c_pos\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# add new character to current string\n\t\t\t\tc_string = c_string + String(line[i])\n\t\t\t\t\n\t\t\telsif $digit.match(line[i])\n\t\t\t\n\t\t\t\t# test for more than one digit\n\t\t\t\t# make sure we don't access a non-existent item...\n\t\t\t\tif i != line.length - 1\n\t\t\t\t\tif $digit.match(line[i + 1])\n\t\t\t\t\t\traise UnknownSymbolError.new(line[i + 1], c_line, c_pos + 1)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\ttokens.push(tokenize(line[i], \"digit\", c_line, i))\n\t\t\t\n\t\t\t# else raise error for unknown symbol\n\t\t\telse\n\t\t\t\tif s_pos != nil\n\t\t\t\t\traise UnknownSymbolError.new(line[i], c_line, s_pos)\n\t\t\t\telse\n\t\t\t\t\traise UnknownSymbolError.new(line[i], c_line, c_pos)\n\t\t\t\tend\n\t\t\tend\n\t\t\t\n\t\t\t# update current position\n\t\t\tc_pos = update_cpos(c_pos, line[i])\n\n\t\tend\n\t\t\n\t\t\n\t\t# check for loose ends\n\t\tif special_case\n\t\t\n\t\t\t# check to make sure that all strings on this line are finished\n\t\t\tif tokens[tokens.length - 1] == \"T_QUOTE\" or tokens[tokens.length - 1] == \"T_STRING\"\n\t\t\t\traise StringDetectionError.new(\"unclosed\", \"\", c_line, 0)\n\t\t\t\n\t\t\t# if boolop, reset special_case\n\t\t\telsif tokens[tokens.length - 1] == \"T_BOOLOP\"\n\t\t\t\tspecial_case = false\n\t\t\tend\n\t\t\n\t\tend\n\t\t\n\t\t# check to make sure no current strings are left\n\t\tif c_string != \"\"\n\t\t\ttokens.push(tokenize(c_string, \"character\", c_line, s_pos))\n\t\tend\n\t\t\n\t\t# reset for next line\n\t\tc_string = \"\"\n\t\tc_pos = 0\n\t\ts_pos = nil\n\t\t\n\t\t# increment the line number\n\t\tc_line = c_line + 1\n\tend\n\t\n\t# if no EOF symbol ($) detected\n\tif !eof_reached\n\t\tbegin\n\t\t\traise EOFDetectionError.new(\"dne\", 0, 0)\n\t\trescue EOFDetectionError\n\t\t\ttokens.push(tokenize(\"$\", \"op\", c_line, 0))\n\t\tend\n\tend\n\t\n\t# return token list\n\treturn tokens\n\t\nend",
"def next_token\n @sy = @tokenizer.next_token\n \n # ignore EOL tokens since no productions would accept them\n while @sy.type == TokenType::EOL_TOKEN\n @sy = @tokenizer.next_token\n end\n end",
"def the\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 23 )\n the_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return \n end\n # at line 302:6: ( 'T' | 't' ) ( 'H' | 'h' ) ( 'E' | 'e' )\n if @input.peek( 1 ).between?( T__16, T__17 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__36, T__37 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__28, T__29 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 23 )\n memoize( __method__, the_start_index, success ) if @state.backtracking > 0\n\n end\n \n return \n end",
"def lex(input)\n input = input.clone\n tokens = []\n\n until input.empty? do\n input.sub!(NEXT_TOKEN, '') || fail(Error, \"Can't lex input here '#{input}'\")\n\n tokens.push($1)\n end\n tokens\n end",
"def advance\n if @token_queue.any?\n return @token_queue.shift\n end\n\n # Ugly, but dependent on Ragel output. Consider refactoring it somehow.\n klass = self.class\n _lex_trans_keys = klass.send :_lex_trans_keys\n _lex_key_spans = klass.send :_lex_key_spans\n _lex_index_offsets = klass.send :_lex_index_offsets\n _lex_indicies = klass.send :_lex_indicies\n _lex_trans_targs = klass.send :_lex_trans_targs\n _lex_trans_actions = klass.send :_lex_trans_actions\n _lex_to_state_actions = klass.send :_lex_to_state_actions\n _lex_from_state_actions = klass.send :_lex_from_state_actions\n _lex_eof_trans = klass.send :_lex_eof_trans\n\n pe = @source_pts.size + 2\n p, eof = @p, pe\n\n @command_state = (@cs == klass.lex_en_expr_value ||\n @cs == klass.lex_en_line_begin)\n\n \n# line 11015 \"lib/parser/lexer.rb\"\nbegin\n\ttestEof = false\n\t_slen, _trans, _keys, _inds, _acts, _nacts = nil\n\t_goto_level = 0\n\t_resume = 10\n\t_eof_trans = 15\n\t_again = 20\n\t_test_eof = 30\n\t_out = 40\n\twhile true\n\tif _goto_level <= 0\n\tif p == pe\n\t\t_goto_level = _test_eof\n\t\tnext\n\tend\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _resume\n\tcase _lex_from_state_actions[ @cs] \n\twhen 84 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = p\n\t\tend\n# line 11043 \"lib/parser/lexer.rb\"\n\tend\n\t_keys = @cs << 1\n\t_inds = _lex_index_offsets[ @cs]\n\t_slen = _lex_key_spans[ @cs]\n\t_wide = ( (@source_pts[p] || 0))\n\t_trans = if ( _slen > 0 && \n\t\t\t_lex_trans_keys[_keys] <= _wide && \n\t\t\t_wide <= _lex_trans_keys[_keys + 1] \n\t\t ) then\n\t\t\t_lex_indicies[ _inds + _wide - _lex_trans_keys[_keys] ] \n\t\t else \n\t\t\t_lex_indicies[ _inds + _slen ]\n\t\t end\n\tend\n\tif _goto_level <= _eof_trans\n\t @cs = _lex_trans_targs[_trans]\n\tif _lex_trans_actions[_trans] != 0\n\tcase _lex_trans_actions[_trans]\n\twhen 28 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 103 then\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 29 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n\twhen 56 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n\twhen 60 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 283 then\n# line 1172 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 36 then\n# line 1441 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 38 then\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 40 then\n# line 1485 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 66 then\n# line 1675 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n\twhen 323 then\n# line 1729 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n\twhen 352 then\n# line 1802 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 428 then\n# line 2020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n\twhen 422 then\n# line 2021 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n\twhen 425 then\n# line 2022 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n\twhen 419 then\n# line 2023 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n\twhen 434 then\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n\twhen 402 then\n# line 2025 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n\twhen 414 then\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 409 then\n# line 2083 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 407 then\n# line 2084 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 80 then\n# line 2219 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 7 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 100 then\n# line 1013 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 5 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 96 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 95 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 101 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 98 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 99 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 6 then\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 4 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 127 then\n# line 1013 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 10 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 124 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 123 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 128 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 126 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 11 then\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 9 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 150 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 149 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 152 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 153 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 156 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 155 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 158 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 165 then\n# line 1013 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 13 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 162 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 161 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 166 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 164 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 12 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 168 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 167 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 175 then\n# line 1013 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 15 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 171 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 170 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 176 then\n# line 958 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 173 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 174 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 14 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 178 then\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. 
Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 177 then\n# line 821 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 758;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 180 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 181 then\n# line 1103 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tREGEXP_OPT, tok(@ts, @te - 1), @ts, @te - 1)\n p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 182 then\n# line 1091 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n unknown_options = tok.scan(/[^imxouesn]/)\n if unknown_options.any?\n diagnostic :error, :regexp_options,\n { :options => unknown_options.join }\n end\n\n emit(:tREGEXP_OPT)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 16 then\n# line 1231 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 183 then\n# line 1231 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 185 then\n# line 1244 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@@[0-9]/\n diagnostic :error, :cvar_name, { :name => tok }\n end\n\n emit(:tCVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 184 then\n# line 1254 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@[0-9]/\n diagnostic :error, :ivar_name, { :name => tok }\n end\n\n emit(:tIVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 206 then\n# line 1275 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(KEYWORDS_BEGIN);\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 192 then\n# line 1283 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tIDENTIFIER)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = 
_out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 18 then\n# line 1287 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n @cs = 773; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 189 then\n# line 1296 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 201 then\n# line 1300 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 20 then\n# line 1306 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if version?(23)\n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 188 then\n# line 1319 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 187 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 205 then\n# line 1275 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS_BEGIN);\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 202 then\n# line 1279 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 204 then\n# line 1283 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 199 then\n# line 1287 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n @cs = 773; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 195 then\n# line 1296 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 200 then\n# line 1303 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 193 then\n# line 1316 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 198 then\n# line 1319 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 19 then\n# line 1296 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit_table(PUNCTUATION)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 17 then\n# line 1319 
\"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 191 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 39 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN);\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 40 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 41 then\n\tbegin begin p = (( @te))-1; end\n emit(:tIDENTIFIER)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 22 then\n# line 1331 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n p = p - 1; @cs = 758; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 208 then\n# line 1337 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 207 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 210 then\n# line 1334 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 209 then\n# line 1337 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 21 then\n# line 1337 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 216 then\n# line 1363 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 215 then\n# line 1369 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 214 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 226 then\n# line 1348 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 217 then\n# line 1352 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 222 then\n# line 1363 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 220 then\n# line 1366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 225 then\n# line 1369 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 249 then\n# line 1427 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 232 then\n# line 1445 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 238 then\n# line 1469 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 24 then\n# line 1477 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 240 then\n# line 1486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 39 then\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 227 then\n# line 1511 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 228 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 239 then\n# line 1436 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 235 then\n# line 1458 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 237 then\n# line 1474 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 231 then\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 230 then\n# line 1502 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 248 then\n# line 1511 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 25 then\n# line 1502 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 41 then\n# line 1511 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 23 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 67 then\n\tbegin begin p = (( @te))-1; end\n\n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 68 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 73 then\n\tbegin begin p = (( @te))-1; end\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\telse\n\tbegin begin p = (( @te))-1; end\nend\nend \n\t\t\tend\n\twhen 43 then\n# line 1547 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 253 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 254 then\n# line 1547 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 44 then\n# line 1547 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 42 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 80 then\n\tbegin begin p = (( @te))-1; end\n\n if @cond.active?\n emit(:kDO_COND, 'do'.freeze, @te - 2, @te)\n else\n emit(:kDO, 'do'.freeze, @te - 2, @te)\n end\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 81 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 264 then\n# line 1581 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_do(true)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 257 then\n# line 1587 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 258 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 259 then\n# line 1584 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 262 then\n# line 1587 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 268 then\n# line 1611 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 267 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 276 then\n# line 1603 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 270 then\n# line 1605 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 274 then\n# line 1611 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 269 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 88 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 89 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 53 then\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 305 then\n# line 1643 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type = delimiter = tok[0].chr\n p = p - 1; \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 299 then\n# line 1650 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = @source_buffer.slice(@ts).chr, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 51 then\n# line 1657 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 306 then\n# line 1703 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 65 then\n# line 1717 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 320 then\n# line 1748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 319 then\n# line 1758 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 307 then\n# line 1797 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 48 then\n# line 1817 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1;\n\n if version?(18)\n ident = tok(@ts, @te - 2)\n\n emit((@source_buffer.slice(@ts) =~ /[A-Z]/) ? 
:tCONSTANT : :tIDENTIFIER,\n ident, @ts, @te - 2)\n p = p - 1; # continue as a symbol\n\n if !@static_env.nil? && @static_env.declared?(ident)\n @cs = 773;\n else\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n @cs = 758;\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 293 then\n# line 1863 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 52 then\n# line 1876 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 279 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 304 then\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 303 then\n# line 1633 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tSTAR, '*'.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 300 then\n# line 1657 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 298 then\n# line 1663 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 308 then\n# line 1717 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 318 then\n# line 1758 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 324 then\n# line 1764 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 301 then\n# line 1797 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 346 then\n# line 1216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 290 then\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 292 then\n# line 1863 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 295 then\n# line 1876 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 50 then\n# line 1663 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 68 then\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 49 then\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 64 then\n# line 1876 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 47 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 111 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 112 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 113 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 115 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 116 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 119 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 354 then\n# line 1896 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 355 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 356 then\n# line 1884 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 360 then\n# line 1896 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 71 then\n# line 1906 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 364 then\n# line 1911 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n \tbegin\n\t\t @cs = (push_literal(tok, tok, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 363 then\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 362 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 366 then\n# line 1915 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 365 then\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 70 then\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 396 then\n# line 1932 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tLAMBDA, '->'.freeze, @ts, @ts + 2)\n\n @lambda_stack.push @paren_nest\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 78 then\n# line 1969 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 374 then\n# line 2104 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts, nil, false, false, true))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 73 then\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 393 then\n# line 2129 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 453; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 387 then\n# line 2156 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 380 then\n# line 2160 
\"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION)\n @cond.lexpop; @cmdarg.lexpop\n\n if RBRACE_OR_RBRACK.include?(tok)\n @cs = 511;\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 392 then\n# line 2174 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tOP_ASGN, tok(@ts, @te - 1))\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 378 then\n# line 2178 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tEH, '?'.freeze)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 375 then\n# line 2186 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 377 then\n# line 2199 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tSEMI, ';'.freeze)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 442 then\n# line 2202 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :error, :bare_backslash, nil, range(@ts, @ts + 1)\n p = p - 1;\n end\n\t\tend\n\twhen 373 then\n# line 2208 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 372 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 452 then\n# line 1965 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 333; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 450 then\n# line 1969 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 449 then\n# line 1980 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 399 then\n# line 2054 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 439 then\n# line 2114 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 391 then\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 397 then\n# line 2129 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 453; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 444 then\n# line 1216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = 
p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 386 then\n# line 2156 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 398 then\n# line 2186 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 384 then\n# line 2193 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 390 then\n# line 2208 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 74 then\n# line 2054 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 72 then\n# line 2208 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 75 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 132 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 133 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 333; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 134 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 135 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 136 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 137 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 138 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 139 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 140 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 141 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? 
'_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 143 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 144 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 145 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 151 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 152 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\nend \n\t\t\tend\n\twhen 82 then\n# line 2220 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 457 then\n# line 2223 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 171; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 458 then\n# line 2223 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 171; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 79 then\n# line 2223 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 171; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 461 then\n# line 2233 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 460 then\n# line 2241 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :embedded_document, nil,\n range(@eq_begin_s, @eq_begin_s + '=begin'.length)\n end\n\t\tend\n\twhen 93 then\n# line 2251 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 949\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 2 then\n# line 2255 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 85 then\n# line 2258 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 86 then\n# line 518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 87 then\n# line 2248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 92 then\n# line 2251 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 949\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 91 then\n# line 2258 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 1 then\n# line 2258 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 765\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 63 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 97 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 125 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 151 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 157 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 163 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 169 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 172 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 179 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 250 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1427 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 241 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 233 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 321 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 294 then\n# line 492 
\"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1863 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 462 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2233 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 459 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2238 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 94 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2251 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 949\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 3 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2255 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 416 then\n# line 626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 415 then\n# line 627 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, chars)) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? 
'_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 417 then\n# line 628 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 410 then\n# line 632 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Float(chars))) } \t\tend\n# line 2086 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 411 then\n# line 636 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2086 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 412 then\n# line 637 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2086 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 120 then\n# line 652 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n 
break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 146 then\n# line 652 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 342 then\n# line 652 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n codepoints.split(/[ \\t]/).each do |codepoint_str|\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length + 1\n end\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 104 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 130 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 326 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 107 then\n# line 679 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 133 then\n# line 679 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 329 then\n# line 679 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 109 then\n# line 698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 135 then\n# line 698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 331 then\n# line 698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 106 then\n# line 705 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. 
So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 132 then\n# line 705 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 328 then\n# line 705 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 122 then\n# line 709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 148 then\n# line 709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 344 then\n# line 709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 117 then\n# line 713 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 143 then\n# line 713 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 339 then\n# line 713 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 121 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 147 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 343 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 116 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 142 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 338 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 119 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 145 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 341 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 105 then\n# line 766 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. 
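[Editor's aside, not part of the serialized lexer above] The action body repeated through these state entries implements the literal-escape rules its comments describe: an escaped delimiter is written back without its backslash ("munged"), while real escape sequences are kept, and a backslash-newline pair inside a literal is dropped. A minimal plain-Ruby sketch of the same surface behaviour, with made-up variable names and assuming stock MRI semantics, is:

# Illustration only: MRI behaviour that the escape-handling action above
# reproduces token-by-token.

# An escaped closing delimiter is "munged": written without the backslash.
s = %q(a\)b)
raise 'unexpected' unless s == 'a)b'

# A backslash-newline inside a double-quoted literal is removed entirely,
# in line with the tok.gsub("\\\n", '') / @escape handling in the lexer.
t = "one\
two"
raise 'unexpected' unless t == 'onetwo'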
So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 131 then\n# line 766 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 327 then\n# line 766 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 154 then\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 159 then\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 54 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 30 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1441 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 32 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 34 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1485 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 194 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1316 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 213 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1334 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 221 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 33 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 252 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1502 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 244 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n 
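[Editor's aside, not part of the serialized lexer above] The @herebody_s bookkeeping reset by this action exists because heredoc bodies are lexed out of line: the remainder of the line that opens them is tokenised first, and lexing then resumes after all of the bodies. A small self-contained illustration of the surface syntax that forces this (identifiers FIRST, SECOND and greeting are invented for the example):

# Illustration only: two heredocs opened on one line. The trailing + "!"
# is lexed before either body, and the next statement is lexed from the
# position after both terminators -- what @herebody_s keeps track of.
greeting = <<FIRST + <<SECOND + "!"
hello
FIRST
world
SECOND
puts greeting   # greeting == "hello\nworld\n!"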
if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1508 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 263 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1584 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 275 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1605 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 271 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1608 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 55 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 291 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 361 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1884 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 357 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1887 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @in_kwarg\n p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 370 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1915 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 367 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1918 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np 
= p - 1; begin \tbegin\n\t\t @cs = 171\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 443 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2193 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 385 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2196 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 946\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 88 then\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 243 then\n# line 983 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1410 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n emit(:tLCURLY, '{'.freeze, @te - 1, @te)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 265 then\n# line 983 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1570 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE_ARG, '{'.freeze)\n end\n @cs = 765;\n end\n\t\tend\n\twhen 353 then\n# line 983 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1775 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE, '{'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 455 then\n# line 983 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1940 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 456 then\n# line 992 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n current_literal = literal\n if current_literal\n if current_literal.end_interp_brace_and_try_closing\n if version?(18, 19)\n emit(:tRCURLY, '}'.freeze, p - 1, p)\n else\n emit(:tSTRING_DEND, 
'}'.freeze, p - 1, p)\n end\n\n if current_literal.saved_herebody_s\n @herebody_s = current_literal.saved_herebody_s\n end\n\n p = p - 1;\n @cs = (stack_pop);\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n \t\tend\n# line 2160 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n @cond.lexpop; @cmdarg.lexpop\n\n if RBRACE_OR_RBRACK.include?(tok)\n @cs = 511;\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 57 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 61 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 197 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1316 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 212 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1334 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 224 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 246 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1505 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 261 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1584 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 273 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1605 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 297 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 359 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1884 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 369 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1915 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 389 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2193 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 90 then\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 218 then\n# line 1172 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1356 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 309 then\n# line 1172 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 282 then\n# line 1172 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 382 then\n# line 1172 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 132 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 133 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 333; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 134 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 135 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 136 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 137 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 138 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 139 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 140 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 141 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 143 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 144 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 145 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 151 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 152 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\nend \n\t\t\tend\n\twhen 219 then\n# line 1173 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1356 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 310 then\n# line 1173 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 284 then\n# line 1173 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 383 then\n# line 1173 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2136 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
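[Editor's aside, not part of the serialized lexer above] The numeric actions here (@num_base, @num_digits_s, @num_suffix_s) enforce the same integer-literal rules MRI does, and the :trailing_in_number and :invalid_octal diagnostics correspond to ordinary SyntaxErrors. A quick plain-Ruby sketch of those rules, for orientation only:

# Illustration only: the surface rules behind the numeric actions above.
p 0b101    # => 5    (base 2)
p 0o17     # => 15   (base 8)
p 0x1f     # => 31   (base 16)
p 1_000    # => 1000 (underscores between digits are allowed)

# A trailing underscore, or an 8/9 digit in an octal literal, is rejected,
# mirroring the :trailing_in_number / :invalid_octal diagnostics.
%w[1000_ 08].each do |src|
  begin
    eval(src)
  rescue SyntaxError => e
    puts "#{src.inspect} rejected: #{e.class}"
  end
end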
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 311 then\n# line 1178 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 285 then\n# line 1178 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 312 then\n# line 1179 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 286 then\n# line 1179 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 316 then\n# line 1180 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 289 then\n# line 1180 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 315 then\n# line 1181 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 288 then\n# line 1181 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 111 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 112 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 113 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 115 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 116 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 119 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 313 then\n# line 1182 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 287 then\n# line 1182 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 314 then\n# line 1187 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1709 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 438 then\n# line 1192 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2118 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT, tok(@ts, tm), @ts, tm)\n p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 242 then\n# line 1198 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 1404 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze, @te - 1, @te)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 345 then\n# line 1198 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 1787 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 441 then\n# line 1198 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 2182 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK2, '['.freeze)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 234 then\n# line 1205 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1385 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if version?(18)\n emit(:tLPAREN2, '('.freeze, @te - 1, @te)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 247 then\n# line 1205 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1398 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN2, '('.freeze)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 255 then\n# line 1205 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1524 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tLPAREN_ARG, 
'('.freeze, @te - 1, @te)\n if version?(18)\n @cs = 765; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 302 then\n# line 1205 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1792 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN, '('.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 394 then\n# line 1205 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 2156 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 395 then\n# line 1211 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2160 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n @cond.lexpop; @cmdarg.lexpop\n\n if RBRACE_OR_RBRACK.include?(tok)\n @cs = 511;\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 67 then\n# line 1675 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 317 then\n# line 1676 \"lib/parser/lexer.rl\"\n\t\tbegin\n new_herebody_s = p \t\tend\n# line 1677 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n tok(@ts, heredoc_e) =~ /^<<(-?)(~?)([\"'`]?)(.*)\\3$/\n\n indent = !$1.empty? || !$2.empty?\n dedent_body = !$2.empty?\n type = $3.empty? ? 
'<<\"'.freeze : ('<<'.freeze + $3)\n delimiter = $4\n\n if dedent_body && version?(18, 19, 20, 21, 22)\n emit(:tLSHFT, '<<'.freeze, @ts, @ts + 2)\n p = @ts + 1\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (push_literal(type, delimiter, @ts, heredoc_e, indent, dedent_body));\n\n @herebody_s ||= new_herebody_s\n p = @herebody_s - 1\n end\n end\n\t\tend\n\twhen 322 then\n# line 1729 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 349 then\n# line 1802 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1803 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 429 then\n# line 2020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 423 then\n# line 2021 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 426 then\n# line 2022 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 420 then\n# line 2023 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 435 then\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 404 then\n# line 2025 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 436 then\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 81 then\n# line 2219 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2220 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 8 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 203 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1275 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 39;\t\tend\n\twhen 190 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1279 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 40;\t\tend\n\twhen 186 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1283 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 41;\t\tend\n\twhen 26 then\n# line 1 \"NONE\"\n\t\tbegin\n 
@te = p+1\n\t\tend\n# line 1445 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 67;\t\tend\n\twhen 236 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1458 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 68;\t\tend\n\twhen 27 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 229 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1502 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 74;\t\tend\n\twhen 256 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1534 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 80;\t\tend\n\twhen 45 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1547 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 81;\t\tend\n\twhen 277 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1599 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 88;\t\tend\n\twhen 266 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1603 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 89;\t\tend\n\twhen 280 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1797 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 111;\t\tend\n\twhen 348 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1803 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 112;\t\tend\n\twhen 347 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1809 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 113;\t\tend\n\twhen 69 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 115;\t\tend\n\twhen 278 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 116;\t\tend\n\twhen 281 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1876 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 119;\t\tend\n\twhen 451 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1940 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 132;\t\tend\n\twhen 446 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1965 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 133;\t\tend\n\twhen 454 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1975 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 135;\t\tend\n\twhen 447 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1980 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 136;\t\tend\n\twhen 448 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1984 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 137;\t\tend\n\twhen 453 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1988 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 138;\t\tend\n\twhen 445 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1999 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 139;\t\tend\n\twhen 440 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2013 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 140;\t\tend\n\twhen 376 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 141;\t\tend\n\twhen 406 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2071 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 144;\t\tend\n\twhen 76 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2086 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 145;\t\tend\n\twhen 379 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2114 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 147;\t\tend\n\twhen 371 then\n# line 1 \"NONE\"\n\t\tbegin\n @te 
= p+1\n\t\tend\n# line 1216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 151;\t\tend\n\twhen 381 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2136 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 152;\t\tend\n\twhen 160 then\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 883 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 110 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 136 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 332 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 115 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 141 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 337 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 114 then\n# line 693 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 140 then\n# line 693 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 336 then\n# line 693 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 112 then\n# line 698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 138 then\n# line 698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 334 then\n# line 698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 108 then\n# line 699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 134 then\n# line 699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 330 then\n# line 699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 118 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 144 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 340 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 102 then\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 766 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 129 then\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 766 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 325 then\n# line 772 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 766 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 58 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 196 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1316 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 211 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1334 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 223 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 245 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1505 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 260 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1584 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 272 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1605 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 296 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1860 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 358 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1884 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 368 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1915 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 388 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2193 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 89 then\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 431 then\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 401 then\n# line 2025 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 413 then\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 408 then\n# line 2083 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 631 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2086 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 405 then\n# line 2084 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 631 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2086 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 251 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 492 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1502 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 74;\t\tend\n\twhen 35 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n 
\t\tend\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 46 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1547 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 81;\t\tend\n\twhen 62 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 94;\t\tend\n\twhen 77 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1969 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 134;\t\tend\n\twhen 37 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1458 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 68;\t\tend\n\twhen 351 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1802 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 115;\t\tend\n\twhen 350 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1802 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 116;\t\tend\n\twhen 432 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 141;\t\tend\n\twhen 113 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 139 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 335 then\n# line 672 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 111 then\n# line 699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 137 then\n# line 699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 845 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed.\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 333 then\n# line 699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 683 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 687 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n if defined?(Encoding)\n emit(:tINTEGER, value.dup.force_encoding(Encoding::BINARY)[0].ord)\n else\n emit(:tINTEGER, value[0].ord)\n end\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 427 then\n# line 2020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 421 then\n# line 2021 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 424 then\n# line 2022 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 418 then\n# line 2023 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 430 then\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 400 then\n# line 2025 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 31 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 812 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1458 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 68;\t\tend\n\twhen 59 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1128 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1131 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 94;\t\tend\n\twhen 437 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2059 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 143;\t\tend\n\twhen 433 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2059 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 143;\t\tend\n\twhen 403 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2025 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2026 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2059 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 143;\t\tend\n# line 21651 \"lib/parser/lexer.rb\"\n\tend\n\tend\n\tend\n\tif _goto_level <= _again\n\tcase _lex_to_state_actions[ @cs] \n\twhen 83 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = nil;\t\tend\n# line 21661 \"lib/parser/lexer.rb\"\n\tend\n\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tp += 1\n\tif p != pe\n\t\t_goto_level = _resume\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _test_eof\n\tif p == eof\n\tif _lex_eof_trans[ @cs] > 0\n\t\t_trans = _lex_eof_trans[ @cs] - 1;\n\t\t_goto_level = _eof_trans\n\t\tnext;\n\tend\n\tend\n\n\tend\n\tif _goto_level <= _out\n\t\tbreak\n\tend\nend\n\tend\n\n# line 282 \"lib/parser/lexer.rl\"\n # %\n\n @p = p\n\n if @token_queue.any?\n @token_queue.shift\n elsif @cs == klass.lex_error\n [ false, [ '$error'.freeze, range(p - 1, p) ] ]\n else\n eof = @source_pts.size\n [ false, [ '$eof'.freeze, range(eof, eof) ] ]\n end\n end",
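The generated actions above all repeat the same escape-sequence rule that their comments describe: inside a regexp literal the lexer keeps escape sequences in their escaped form and strips only the escaped newline used for line continuation, while inside an ordinary string literal it substitutes the decoded escape when one is known and falls back to the raw token otherwise. The standalone Ruby sketch below restates that rule outside the Ragel machinery; ESCAPES here is a trimmed, hypothetical table and extend_fragment is an illustrative helper, not part of the parser gem's API.

# Minimal sketch of the escape rule implemented by the generated actions above.
# ESCAPES and extend_fragment are illustrative names, not parser gem API.
ESCAPES = { 'n' => "\n", 't' => "\t" }.freeze  # trimmed, hypothetical table

def extend_fragment(buffer, token, regexp:)
  if regexp
    # Regexps keep "\n", "\A", etc. verbatim; only "\<newline>" (a line
    # continuation) is dropped, mirroring tok.gsub("\\\n", '') above.
    buffer << token.gsub("\\\n", ''.freeze)
  else
    # Plain strings receive the decoded escape when the table knows it,
    # otherwise the raw token, mirroring (@escape || tok) above.
    decoded = ESCAPES[token[1]]
    buffer << (decoded || token)
  end
end

out = +''
extend_fragment(out, "\\n", regexp: false)  # appends a real newline
extend_fragment(out, "\\n", regexp: true)   # appends backslash followed by "n"
extend_fragment(out, "\\\n", regexp: true)  # appends nothing (line continuation)

The character-literal branch in the same machine applies the same escape-or-token fallback before deciding what to emit: under a 1.8 target it emits tINTEGER with the character's ordinal, otherwise tCHARACTER with the character itself.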
"def advance\n if @token_queue.any?\n return @token_queue.shift\n end\n\n # Ugly, but dependent on Ragel output. Consider refactoring it somehow.\n klass = self.class\n _lex_trans_keys = klass.send :_lex_trans_keys\n _lex_key_spans = klass.send :_lex_key_spans\n _lex_index_offsets = klass.send :_lex_index_offsets\n _lex_indicies = klass.send :_lex_indicies\n _lex_trans_targs = klass.send :_lex_trans_targs\n _lex_trans_actions = klass.send :_lex_trans_actions\n _lex_to_state_actions = klass.send :_lex_to_state_actions\n _lex_from_state_actions = klass.send :_lex_from_state_actions\n _lex_eof_trans = klass.send :_lex_eof_trans\n\n pe = @source_pts.size + 2\n p, eof = @p, pe\n\n cmd_state = @command_start\n @command_start = false\n\n \n# line 11330 \"lib/parser/lexer.rb\"\nbegin\n\ttestEof = false\n\t_slen, _trans, _keys, _inds, _acts, _nacts = nil\n\t_goto_level = 0\n\t_resume = 10\n\t_eof_trans = 15\n\t_again = 20\n\t_test_eof = 30\n\t_out = 40\n\twhile true\n\tif _goto_level <= 0\n\tif p == pe\n\t\t_goto_level = _test_eof\n\t\tnext\n\tend\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _resume\n\tcase _lex_from_state_actions[ @cs] \n\twhen 97 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = p\n\t\tend\n# line 11358 \"lib/parser/lexer.rb\"\n\tend\n\t_keys = @cs << 1\n\t_inds = _lex_index_offsets[ @cs]\n\t_slen = _lex_key_spans[ @cs]\n\t_wide = ( (@source_pts[p] || 0))\n\t_trans = if ( _slen > 0 && \n\t\t\t_lex_trans_keys[_keys] <= _wide && \n\t\t\t_wide <= _lex_trans_keys[_keys + 1] \n\t\t ) then\n\t\t\t_lex_indicies[ _inds + _wide - _lex_trans_keys[_keys] ] \n\t\t else \n\t\t\t_lex_indicies[ _inds + _slen ]\n\t\t end\n\tend\n\tif _goto_level <= _eof_trans\n\t @cs = _lex_trans_targs[_trans]\n\tif _lex_trans_actions[_trans] != 0\n\tcase _lex_trans_actions[_trans]\n\twhen 29 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 117 then\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 30 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n\twhen 60 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n\twhen 64 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 310 then\n# line 1273 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 37 then\n# line 1566 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 39 then\n# line 1586 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 41 then\n# line 1614 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 71 then\n# line 1806 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n\twhen 349 then\n# line 1898 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 1; diag_msg = :ivar_name \t\tend\n\twhen 352 then\n# line 1899 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2; diag_msg = :cvar_name \t\tend\n\twhen 360 then\n# line 1919 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n\twhen 392 then\n# line 2008 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 308 then\n# line 2115 \"lib/parser/lexer.rl\"\n\t\tbegin\n ident_tok = tok; ident_ts = @ts; ident_te = @te; \t\tend\n\twhen 479 then\n# line 2301 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n\twhen 473 then\n# line 2302 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n\twhen 476 then\n# line 2303 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n\twhen 470 then\n# line 2304 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n\twhen 485 then\n# line 2305 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n\twhen 447 then\n# line 2306 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n\twhen 462 then\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 455 then\n# line 2364 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 452 then\n# line 2365 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 89 then\n# line 2575 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 7 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 113 then\n# line 1106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 803;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 5 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 109 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 108 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 115 then\n# line 1048 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 114 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 111 then\n# line 1019 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 112 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 6 then\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 4 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 145 then\n# line 1106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 803;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 10 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 142 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 141 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 147 then\n# line 1048 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 146 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 144 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 11 then\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 9 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 173 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 172 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 175 then\n# line 1019 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 176 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 179 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 178 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 181 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 188 then\n# line 1106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 803;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 13 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 185 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 184 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 190 then\n# line 1048 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 189 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 187 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 12 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 192 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 191 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 199 then\n# line 1106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 803;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 15 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 195 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 194 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 201 then\n# line 1048 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 200 then\n# line 1032 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 197 then\n# line 1019 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 198 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 14 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 203 then\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 202 then\n# line 874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 796;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 205 then\n# line 1019 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 206 then\n# line 1203 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tREGEXP_OPT, tok(@ts, @te - 1), @ts, @te - 1)\n p = p - 1;\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 207 then\n# line 1190 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n unknown_options = tok.scan(/[^imxouesn]/)\n if unknown_options.any?\n diagnostic :error, :regexp_options,\n { :options => unknown_options.join }\n end\n\n emit(:tREGEXP_OPT)\n @cs = 811;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 16 then\n# line 1342 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 208 then\n# line 1342 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 210 then\n# line 1355 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@@[0-9]/\n diagnostic :error, 
:cvar_name, { :name => tok }\n end\n\n emit(:tCVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 209 then\n# line 1365 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@[0-9]/\n diagnostic :error, :ivar_name, { :name => tok }\n end\n\n emit(:tIVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 231 then\n# line 1386 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(KEYWORDS_BEGIN);\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 217 then\n# line 1394 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tIDENTIFIER)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 18 then\n# line 1398 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n @cs = 811; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 214 then\n# line 1407 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 226 then\n# line 1411 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 20 then\n# line 1417 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if version?(23)\n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 213 then\n# line 1430 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 212 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 230 then\n# line 1386 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS_BEGIN);\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 227 then\n# line 1390 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 229 then\n# line 1394 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 224 then\n# line 1398 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n @cs = 811; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 220 then\n# line 1407 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 225 then\n# line 1414 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 218 then\n# line 1427 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 223 then\n# line 1430 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 19 then\n# line 1407 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit_table(PUNCTUATION)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 17 then\n# line 1430 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 216 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 43 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN);\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 44 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 45 then\n\tbegin begin p = (( @te))-1; end\n emit(:tIDENTIFIER)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 22 then\n# line 1442 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n p = p - 1; @cs = 796; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 233 then\n# line 1460 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 232 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 235 then\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 234 then\n# line 1460 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 21 then\n# line 1460 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 241 then\n# line 1486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 240 then\n# line 1492 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 239 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 251 then\n# line 1471 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 242 then\n# line 1475 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 247 then\n# line 1486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 245 then\n# line 1489 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 250 then\n# line 1492 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 274 then\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 257 then\n# line 1570 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n if @version < 30\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n else\n diagnostic :warning, :ambiguous_regexp, nil, range(tm, tm + 1)\n end\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 263 then\n# line 1598 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 25 then\n# line 1606 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 
564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 265 then\n# line 1615 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 40 then\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 252 then\n# line 1640 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 253 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 264 then\n# line 1561 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 260 then\n# line 1587 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 262 then\n# line 1603 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 256 then\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 255 then\n# line 1631 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 273 then\n# line 1640 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 26 then\n# line 1631 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 42 then\n# line 1640 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 24 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 72 then\n\tbegin begin p = (( @te))-1; end\n\n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n if @version < 30\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n else\n diagnostic :warning, :ambiguous_regexp, nil, range(tm, tm + 1)\n end\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 73 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 78 then\n\tbegin begin p = (( @te))-1; end\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\telse\n\tbegin begin p = (( @te))-1; end\nend\nend \n\t\t\tend\n\twhen 44 then\n# line 1676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin 
p = @ts - 1\n \tbegin\n\t\t @cs = 495\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 278 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 279 then\n# line 1676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 495\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 45 then\n# line 1676 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 495\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 43 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 85 then\n\tbegin begin p = (( @te))-1; end\n\n if @cond.active?\n emit(:kDO_COND, 'do'.freeze, @te - 2, @te)\n else\n emit(:kDO, 'do'.freeze, @te - 2, @te)\n end\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 86 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 495\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 289 then\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_do(true)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 282 then\n# line 1718 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 283 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 284 then\n# line 1715 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 287 then\n# line 1718 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 293 then\n# line 1742 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 292 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 301 then\n# line 1734 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 295 then\n# line 1736 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 299 then\n# line 1742 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 294 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 93 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 57 then\n# line 1757 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 334 then\n# line 1774 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type = delimiter = tok[0].chr\n p = p - 1; \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 326 then\n# line 1781 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = @source_buffer.slice(@ts).chr, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 55 then\n# line 1788 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 347 then\n# line 1863 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1; p = p - 1;\n emit(:tSYMBEG, tok(@ts, @ts + 1), @ts, @ts + 1)\n \tbegin\n\t\t @cs = 353\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 335 then\n# line 1871 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 346 then\n# line 1879 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1, @ts + 2))\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 69 then\n# line 1893 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 357 then\n# line 1934 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 356 then\n# line 1944 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 
1)\n end\n\t\tend\n\twhen 336 then\n# line 2003 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 52 then\n# line 2024 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1;\n\n if version?(18)\n ident = tok(@ts, @te - 2)\n\n emit((@source_buffer.slice(@ts) =~ /[A-Z]/) ? :tCONSTANT : :tIDENTIFIER,\n ident, @ts, @te - 2)\n p = p - 1; # continue as a symbol\n\n if !@static_env.nil? && @static_env.declared?(ident)\n @cs = 811;\n else\n @cs = (arg_or_cmdarg(cmd_state));\n end\n else\n emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n @cs = 796;\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 49 then\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te)\n p = ident_te - 1\n\n if !@static_env.nil? && @static_env.declared?(ident_tok) && @version < 25\n @cs = 466;\n else\n @cs = 526;\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 320 then\n# line 2136 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 187\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 56 then\n# line 2152 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 304 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 330 then\n# line 1757 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 329 then\n# line 1764 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tSTAR, '*'.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 325 then\n# line 1794 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 354 then\n# line 1854 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 337 then\n# line 1893 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 350 then\n# line 1901 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te)\n else\n emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = @ts\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 355 then\n# line 1944 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 361 then\n# line 1950 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 327 then\n# line 2003 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 331 then\n# line 2051 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n emit(:tBDOT2)\n else\n emit(:tDOT2)\n end\n\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 332 then\n# line 2062 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n # Here we scan and conditionally emit \"\\n\":\n # + if it's there\n # + and emitted we do nothing\n # + and not emitted we return `p` to \"\\n\" to process it on the next scan\n # + if it's not there we do nothing\n followed_by_nl = @te - 1 == @newline_s\n nl_emitted = false\n dots_te = followed_by_nl ? @te - 1 : @te\n\n if @version >= 30\n if @lambda_stack.any? 
&& @lambda_stack.last + 1 == @paren_nest\n # To reject `->(...)` like `->...`\n emit(:tDOT3, '...'.freeze, @ts, dots_te)\n else\n emit(:tBDOT3, '...'.freeze, @ts, dots_te)\n\n if @version >= 31 && followed_by_nl && @context.in_def_open_args?\n emit(:tNL, @te - 1, @te)\n nl_emitted = true\n end\n end\n elsif @version >= 27\n emit(:tBDOT3, '...'.freeze, @ts, dots_te)\n else\n emit(:tDOT3, '...'.freeze, @ts, dots_te)\n end\n\n if followed_by_nl && !nl_emitted\n # return \"\\n\" to process it on the next scan\n p = p - 1;\n end\n\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 307 then\n# line 1327 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 317 then\n# line 2133 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 319 then\n# line 2136 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 187\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 322 then\n# line 2152 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 54 then\n# line 1794 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 73 then\n# line 1854 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 74 then\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 48 then\n# line 1327 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 53 then\n# line 2133 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 68 then\n# line 2152 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 51 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 99 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 106 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\twhen 117 then\n\tbegin begin p = (( @te))-1; end\n\n if @version >= 27\n emit(:tPIPE, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1;\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n p -= 2\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\twhen 121 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 540; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 123 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 127 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 128 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 132 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 395 then\n# line 2172 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 396 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 397 then\n# line 2160 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 401 then\n# line 2172 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 77 then\n# line 2182 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 405 then\n# line 2187 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n \tbegin\n\t\t @cs = (push_literal(tok, tok, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 404 then\n# line 2197 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 403 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 407 then\n# line 2191 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 406 then\n# line 2197 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 76 then\n# line 2197 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 564\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 440 then\n# line 2208 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tLAMBDA, '->'.freeze, @ts, @ts + 2)\n\n @lambda_stack.push @paren_nest\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 86 then\n# line 2249 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 415 then\n# line 2385 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts, nil, false, false, true))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 79 then\n# line 2403 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 436 then\n# line 2410 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 489 then\n# line 2434 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 429 then\n# line 2443 
\"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION);\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 434 then\n# line 2478 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tOP_ASGN, tok(@ts, @te - 1))\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 420 then\n# line 2482 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tEH, '?'.freeze)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 417 then\n# line 2501 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 419 then\n# line 2514 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tSEMI, ';'.freeze)\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 494 then\n# line 2518 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :error, :bare_backslash, nil, range(@ts, @ts + 1)\n p = p - 1;\n end\n\t\tend\n\twhen 414 then\n# line 2524 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 413 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 505 then\n# line 2245 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 353; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 503 then\n# line 2249 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 502 then\n# line 2260 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 444 then\n# line 2335 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 491 then\n# line 2395 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 433 then\n# line 2403 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 348\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 441 then\n# line 2410 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 497 then\n# line 1327 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if 
!@static_env.nil? && @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 439 then\n# line 2434 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 435 then\n# line 2443 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION);\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 428 then\n# line 2449 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 442 then\n# line 2501 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 426 then\n# line 2508 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 432 then\n# line 2524 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 84 then\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 80 then\n# line 2335 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 83 then\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 78 then\n# line 2524 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 81 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 145 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n if tok == '{'.freeze\n @paren_nest += 1\n end\n @command_start = true\n\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 146 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n 
@cs = 353; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 148 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 149 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 150 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 540; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 151 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 152 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg(cmd_state));\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 153 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 154 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 156 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 157 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 158 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 160 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 164 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 165 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 167 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(PUNCTUATION);\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 168 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 516 then\n# line 2565 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tNL, nil, @newline_s, @newline_s + 1)\n if @version < 27\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tBDOT3)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 95 then\n# line 2576 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 510 then\n# line 2579 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 513 then\n# line 2540 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version < 27\n # Ruby before 2.7 doesn't support comments before leading dot.\n # If a line after \"a\" starts with a comment then \"a\" is a self-contained statement.\n # So in that case we emit a special tNL token and start reading the\n # next line as a separate statement.\n #\n # Note: block comments before leading dot are not supported on any version of Ruby.\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 515 then\n# line 2554 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tNL, nil, @newline_s, @newline_s + 1)\n if @version < 27\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tBDOT2)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 514 then\n# line 2576 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = tm - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 512 then\n# line 2579 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 90 then\n# line 2540 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n if @version < 27\n # Ruby before 2.7 doesn't support comments before leading dot.\n # If a line after \"a\" starts with a comment then \"a\" is a self-contained statement.\n # So in that case we emit a special tNL token and start reading the\n # next line as a separate statement.\n #\n # Note: block comments before leading dot are not supported on any version of Ruby.\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 87 then\n# line 2579 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 91 
then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 181 then\n\tbegin begin p = (( @te))-1; end\n\n if @version < 27\n # Ruby before 2.7 doesn't support comments before leading dot.\n # If a line after \"a\" starts with a comment then \"a\" is a self-contained statement.\n # So in that case we emit a special tNL token and start reading the\n # next line as a separate statement.\n #\n # Note: block comments before leading dot are not supported on any version of Ruby.\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 185 then\n\tbegin begin p = (( @te))-1; end\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 187; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 519 then\n# line 2589 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = (@cs_before_block_comment)\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 518 then\n# line 2597 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :embedded_document, nil,\n range(@eq_begin_s, @eq_begin_s + '=begin'.length)\n end\n\t\tend\n\twhen 106 then\n# line 2607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 999\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 2 then\n# line 2611 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 98 then\n# line 2614 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin cmd_state = true; p = p - 1; \tbegin\n\t\t @cs = 803\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 99 then\n# line 528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 100 then\n# line 2604 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 105 then\n# line 2607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 999\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 104 then\n# line 2614 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin cmd_state = true; p = p - 1; \tbegin\n\t\t @cs = 803\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 1 then\n# line 2614 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin cmd_state = true; p = p - 1; \tbegin\n\t\t @cs = 803\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 67 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 110 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 143 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 174 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 180 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 186 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 193 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 196 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 204 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 23 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1446 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @version >= 31\n emit(:tBDOT3, '...'.freeze, @ts, @te - 1)\n emit(:tNL, \"\\n\".freeze, @te - 1, @te)\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n p -= 4;\n p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 275 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 266 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1615 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 258 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 358 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # 
Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1934 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 333 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2062 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Here we scan and conditionally emit \"\\n\":\n # + if it's there\n # + and emitted we do nothing\n # + and not emitted we return `p` to \"\\n\" to process it on the next scan\n # + if it's not there we do nothing\n followed_by_nl = @te - 1 == @newline_s\n nl_emitted = false\n dots_te = followed_by_nl ? @te - 1 : @te\n\n if @version >= 30\n if @lambda_stack.any? && @lambda_stack.last + 1 == @paren_nest\n # To reject `->(...)` like `->...`\n emit(:tDOT3, '...'.freeze, @ts, dots_te)\n else\n emit(:tBDOT3, '...'.freeze, @ts, dots_te)\n\n if @version >= 31 && followed_by_nl && @context.in_def_open_args?\n emit(:tNL, @te - 1, @te)\n nl_emitted = true\n end\n end\n elsif @version >= 27\n emit(:tBDOT3, '...'.freeze, @ts, dots_te)\n else\n emit(:tDOT3, '...'.freeze, @ts, dots_te)\n end\n\n if followed_by_nl && !nl_emitted\n # return \"\\n\" to process it on the next scan\n p = p - 1;\n end\n\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 321 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2136 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 187\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 443 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2490 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @paren_nest == 0\n diagnostic :warning, :triple_dot_at_eol, nil, range(@ts, @te - 1)\n end\n\n emit(:tDOT3, '...'.freeze, @ts, @te - 1)\n p = p - 1;\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 520 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2589 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = 
(@cs_before_block_comment)\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 517 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2594 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 107 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 999\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 3 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2611 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 465 then\n# line 636 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 463 then\n# line 637 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, chars)) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 468 then\n# line 638 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 466 then\n# line 639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 2); p -= 2 } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 464 then\n# line 640 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 2); p -= 2 } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 467 then\n# line 641 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 6); p -= 6 } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 456 then\n# line 645 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Float(chars))) } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 457 then\n# line 646 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars), @ts, @te - 2); p -= 2 } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 458 then\n# line 650 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 460 then\n# line 651 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 459 then\n# line 652 
\"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars), @ts, @te - 6); p -= 6 } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 137 then\n# line 667 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 168 then\n# line 667 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 382 then\n# line 667 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 118 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 149 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 363 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 121 then\n# line 719 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 152 then\n# line 719 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 366 then\n# line 719 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 123 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 154 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 368 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 120 then\n# line 755 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 151 then\n# line 755 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 365 then\n# line 755 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 140 then\n# line 759 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 171 then\n# line 759 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 385 then\n# line 759 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 139 then\n# line 763 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 170 then\n# line 763 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 384 then\n# line 763 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 134 then\n# line 769 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 165 then\n# line 769 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 379 then\n# line 769 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 133 then\n# line 773 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 164 then\n# line 773 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 378 then\n# line 773 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 138 then\n# line 779 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 169 then\n# line 779 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 383 then\n# line 779 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 135 then\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 166 then\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 380 then\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 119 then\n# line 819 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 150 then\n# line 819 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 364 then\n# line 819 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 177 then\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 182 then\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 58 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 31 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1566 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 33 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1586 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 35 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1614 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 219 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1427 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 238 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 246 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1489 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 34 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 277 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1631 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 269 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # 
position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1637 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 288 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1715 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 300 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1736 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 296 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1739 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 59 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1757 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 50 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te)\n p = ident_te - 1\n\n if !@static_env.nil? 
&& @static_env.declared?(ident_tok) && @version < 25\n @cs = 466;\n else\n @cs = 526;\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 318 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2133 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 402 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2160 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 398 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2163 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @in_kwarg\n p = p - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n \tbegin\n\t\t @cs = 187\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 411 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2191 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 408 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2194 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 187\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 495 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2508 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 427 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2511 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 991\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 101 then\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2604 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 268 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1533 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze, @te - 1, @te)\n else\n emit(:tLCURLY, '{'.freeze, @te - 1, @te)\n end\n 
@command_start = true\n @paren_nest += 1\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 290 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1699 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE_ARG, '{'.freeze)\n end\n @paren_nest += 1\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 393 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n @command_start = true\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE, '{'.freeze)\n end\n @paren_nest += 1\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 508 then\n# line 1071 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 2216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n if tok == '{'.freeze\n @paren_nest += 1\n end\n @command_start = true\n\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 509 then\n# line 1080 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n current_literal = literal\n if current_literal\n if current_literal.end_interp_brace_and_try_closing\n if version?(18, 19)\n emit(:tRCURLY, '}'.freeze, p - 1, p)\n @cond.lexpop\n @cmdarg.lexpop\n else\n emit(:tSTRING_DEND, '}'.freeze, p - 1, p)\n end\n\n if current_literal.saved_herebody_s\n @herebody_s = current_literal.saved_herebody_s\n end\n\n\n p = p - 1;\n @cs = (next_state_for_literal(current_literal));\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\n @paren_nest -= 1\n \t\tend\n# line 2453 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 811;\n else\n @cs = 532;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 61 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 65 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 222 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1427 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 237 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 249 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1489 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 271 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 286 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1715 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 298 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1736 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 324 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2133 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 400 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2160 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 410 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2191 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 431 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2508 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 103 then\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2604 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 243 then\n# line 1273 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1479 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg(cmd_state)); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 338 then\n# line 1273 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 309 then\n# line 1273 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 424 then\n# line 1273 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 145 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n if tok == '{'.freeze\n @paren_nest += 1\n end\n @command_start = true\n\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 146 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 353; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 148 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 149 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 150 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 540; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 151 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 152 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg(cmd_state));\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 153 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 154 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 156 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 157 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 158 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 160 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 164 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 165 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 167 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(PUNCTUATION);\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 168 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 244 then\n# line 1274 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1479 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg(cmd_state)); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 339 then\n# line 1274 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 311 then\n# line 1274 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 425 then\n# line 1274 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2417 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 495; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 340 then\n# line 1279 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 312 then\n# line 1279 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 341 then\n# line 1280 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 313 then\n# line 1280 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 345 then\n# line 1281 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 316 then\n# line 1281 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 344 then\n# line 1282 
\"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 315 then\n# line 1282 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 99 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 106 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\twhen 117 then\n\tbegin begin p = (( @te))-1; end\n\n if @version >= 27\n emit(:tPIPE, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1;\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n p -= 2\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\twhen 121 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 540; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 123 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @command_start = true\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 127 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 128 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 132 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 342 then\n# line 1283 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 314 then\n# line 1283 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 343 then\n# line 1288 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1885 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 490 then\n# line 1293 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2399 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT, tok(@ts, tm), @ts, tm)\n p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 267 then\n# line 1299 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze, @te - 1, @te)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 386 then\n# line 1299 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1993 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 493 then\n# line 1299 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 2486 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK2, '['.freeze)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 496 then\n# line 1305 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2453 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 811;\n else\n @cs = 532;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 259 then\n# line 1312 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1508 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if version?(18)\n emit(:tLPAREN2, '('.freeze, @te - 1, @te)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n @cs = 564; \tbegin\n\t\tp 
+= 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 272 then\n# line 1312 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1521 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN2, '('.freeze)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 280 then\n# line 1312 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1653 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n if version?(18)\n @cs = 803; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 328 then\n# line 1312 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1998 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN, '('.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 437 then\n# line 1312 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 2449 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 438 then\n# line 1322 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2453 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 811;\n else\n @cs = 532;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 72 then\n# line 1806 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 353 then\n# line 1807 \"lib/parser/lexer.rl\"\n\t\tbegin\n new_herebody_s = p \t\tend\n# line 1808 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n tok(@ts, heredoc_e) =~ /^<<(-?)(~?)([\"'`]?)(.*)\\3$/m\n\n indent = !$1.empty? || !$2.empty?\n dedent_body = !$2.empty?\n type = $3.empty? ? 
'<<\"'.freeze : ('<<'.freeze + $3)\n delimiter = $4\n\n if @version >= 27\n if delimiter.count(\"\\n\") > 0 || delimiter.count(\"\\r\") > 0\n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n elsif @version >= 24\n if delimiter.count(\"\\n\") > 0\n if delimiter.end_with?(\"\\n\")\n diagnostic :warning, :heredoc_id_ends_with_nl, nil, range(@ts, @ts + 1)\n delimiter = delimiter.rstrip\n else\n diagnostic :fatal, :heredoc_id_has_newline, nil, range(@ts, @ts + 1)\n end\n end\n end\n\n if dedent_body && version?(18, 19, 20, 21, 22)\n emit(:tLSHFT, '<<'.freeze, @ts, @ts + 2)\n p = @ts + 1\n @cs = 564; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (push_literal(type, delimiter, @ts, heredoc_e, indent, dedent_body));\n\n @herebody_s ||= new_herebody_s\n p = @herebody_s - 1\n end\n end\n\t\tend\n\twhen 348 then\n# line 1898 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 1; diag_msg = :ivar_name \t\tend\n# line 1901 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te)\n else\n emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = @ts\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 351 then\n# line 1899 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2; diag_msg = :cvar_name \t\tend\n# line 1901 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te)\n else\n emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = @ts\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 359 then\n# line 1919 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 389 then\n# line 2008 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2009 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 540; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 480 then\n# line 2301 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 474 then\n# line 2302 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 477 then\n# line 2303 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 471 then\n# line 2304 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 486 then\n# line 2305 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 449 then\n# line 2306 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 487 
then\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 8 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 453 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2365 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 228 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1386 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 43;\t\tend\n\twhen 215 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1390 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 44;\t\tend\n\twhen 211 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1394 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 45;\t\tend\n\twhen 27 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1570 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 72;\t\tend\n\twhen 261 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1587 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 28 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 78;\t\tend\n\twhen 254 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1631 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 79;\t\tend\n\twhen 281 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1663 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 85;\t\tend\n\twhen 46 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 86;\t\tend\n\twhen 302 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1730 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 93;\t\tend\n\twhen 291 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1734 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 94;\t\tend\n\twhen 70 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1854 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 106;\t\tend\n\twhen 394 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1962 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 117;\t\tend\n\twhen 305 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2003 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 121;\t\tend\n\twhen 388 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2009 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 122;\t\tend\n\twhen 387 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2015 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 123;\t\tend\n\twhen 75 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 127;\t\tend\n\twhen 303 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1327 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 128;\t\tend\n\twhen 306 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2152 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 132;\t\tend\n\twhen 504 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2216 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 145;\t\tend\n\twhen 499 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2245 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 146;\t\tend\n\twhen 
507 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2255 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 148;\t\tend\n\twhen 500 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2260 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 149;\t\tend\n\twhen 501 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 150;\t\tend\n\twhen 506 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2269 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 151;\t\tend\n\twhen 498 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2280 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 152;\t\tend\n\twhen 492 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2294 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 153;\t\tend\n\twhen 418 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 154;\t\tend\n\twhen 451 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2352 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 157;\t\tend\n\twhen 82 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 158;\t\tend\n\twhen 421 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2395 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 160;\t\tend\n\twhen 412 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1327 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 164;\t\tend\n\twhen 423 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2417 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 165;\t\tend\n\twhen 416 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2443 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 167;\t\tend\n\twhen 422 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2449 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 168;\t\tend\n\twhen 88 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2540 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 181;\t\tend\n\twhen 511 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2579 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 185;\t\tend\n\twhen 183 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 957 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
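# -- Illustrative aside (added commentary, not part of the Ragel output):
# a minimal sketch of the CR-stripping applied to the scanned heredoc line
# above, using a hypothetical raw terminator line.
raw_line = "HEREDOC_END\r\r"
raw_line.gsub(/\r+$/, '')    # => "HEREDOC_END", as in the gsub above
raw_line.gsub(/\r.*$/, '')   # => "HEREDOC_END", the stricter 1.8-2.0 variant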
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 124 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 155 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 369 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 131 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
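# -- Illustrative aside (added commentary, not part of the Ragel output):
# the backslash-newline removal performed by the gsub below, on a tiny input.
"a\\\nb".gsub("\\\n", "")   # => "ab"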
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 162 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 376 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 122 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 153 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
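# -- Illustrative aside (added commentary, not part of the Ragel output):
# keeping the escaped form means the regexp source still contains the
# backslash, which Ruby's regexp engine accepts as-is, e.g.:
Regexp.new("a\\/b") =~ "a/b"   # => 0 ("\/" is kept verbatim in the source)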
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 367 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 130 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 161 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
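# -- Illustrative aside (added commentary, not part of the Ragel output):
# the bit arithmetic used by the escape actions in these branches
# (ord & 0x9f for the \c/\C- forms, ord | 0x80 for the \M- forms),
# shown on a plain character.
('x'.ord & 0x9f).chr   # => "\x18"  control form (Ctrl-X)
('x'.ord | 0x80).chr   # => "\xF8"  meta form (high bit set)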
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 375 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 126 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
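# -- Illustrative aside (added commentary, not part of the Ragel output):
# the character-literal action above (lexer.rl line 1921) drops the leading
# "?" and emits either the character (tCHARACTER) or, under 1.8, its byte
# value (tINTEGER).
text = "?a"              # hypothetical character-literal token text
text[1..]                # => "a"  -> tCHARACTER payload
text[1..].getbyte(0)     # => 97   -> tINTEGER payload under 1.8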
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 157 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 371 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 129 then\n# line 748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 160 then\n# line 748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 374 then\n# line 748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 136 then\n# line 779 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 167 then\n# line 779 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 381 then\n# line 779 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 116 then\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 819 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 148 then\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 819 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 362 then\n# line 825 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 819 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 62 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 221 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1427 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 236 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1457 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 248 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1489 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 270 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 811\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 285 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1715 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 297 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1736 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 323 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2133 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 399 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2160 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 409 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2191 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 430 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2508 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 102 then\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2604 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 482 then\n# line 2305 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 446 then\n# line 2306 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 461 then\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 454 then\n# line 2364 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 644 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 450 then\n# line 2365 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 644 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 276 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1631 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 79;\t\tend\n\twhen 36 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 78;\t\tend\n\twhen 47 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1676 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 86;\t\tend\n\twhen 94 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2540 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 181;\t\tend\n\twhen 66 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 1757 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 99;\t\tend\n\twhen 85 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2249 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 147;\t\tend\n\twhen 93 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2540 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 181;\t\tend\n\twhen 38 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1586 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1587 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 391 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2008 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2106 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 127;\t\tend\n\twhen 390 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2008 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1327 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 128;\t\tend\n\twhen 483 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2305 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 154;\t\tend\n\twhen 127 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 158 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 372 then\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n\n if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U'\n diagnostic :fatal, :invalid_escape\n end\n\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 125 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 156 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? 
&& @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 370 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 132 then\n# line 741 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 163 then\n# line 741 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 377 then\n# line 741 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 128 then\n# line 748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 159 then\n# line 748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 898 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char)\n # Ruby >= 3.1 escapes \\c- and \\m chars, that's the only escape sequence\n # supported by regexes so far, so it needs a separate branch.\n current_literal.extend_string(@escape, @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 373 then\n# line 748 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(p - 2, p).to_i(16)) \t\tend\n# line 731 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 735 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1921 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 811; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 478 then\n# line 2301 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 472 then\n# line 2302 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 475 then\n# line 2303 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 469 then\n# line 2304 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 481 then\n# line 2305 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 445 then\n# line 2306 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2308 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 32 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 865 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1586 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1587 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 63 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1757 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 99;\t\tend\n\twhen 92 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1229 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1232 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2540 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 181;\t\tend\n\twhen 488 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2340 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 156;\t\tend\n\twhen 484 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2305 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2340 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 156;\t\tend\n\twhen 448 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2306 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2307 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2340 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 156;\t\tend\n# line 24782 \"lib/parser/lexer.rb\"\n\tend\n\tend\n\tend\n\tif _goto_level <= _again\n\tcase _lex_to_state_actions[ @cs] \n\twhen 96 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = nil;\t\tend\n# line 24792 \"lib/parser/lexer.rb\"\n\tend\n\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tp += 1\n\tif p != pe\n\t\t_goto_level = _resume\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _test_eof\n\tif p == eof\n\tif _lex_eof_trans[ @cs] > 0\n\t\t_trans = _lex_eof_trans[ @cs] - 1;\n\t\t_goto_level = _eof_trans\n\t\tnext;\n\tend\n\tend\n\n\tend\n\tif _goto_level <= _out\n\t\tbreak\n\tend\nend\n\tend\n\n# line 286 \"lib/parser/lexer.rl\"\n # %\n\n # Ragel creates a local variable called `testEof` but it doesn't use\n # it in any assignment. This dead code is here to swallow the warning.\n # It has no runtime cost because Ruby doesn't produce any instructions from it.\n if false\n testEof\n end\n\n @p = p\n\n if @token_queue.any?\n @token_queue.shift\n elsif @cs == klass.lex_error\n [ false, [ '$error'.freeze, range(p - 1, p) ] ]\n else\n eof = @source_pts.size\n [ false, [ '$eof'.freeze, range(eof, eof) ] ]\n end\n end",
"def advance\n if @token_queue.any?\n return @token_queue.shift\n end\n\n # Ugly, but dependent on Ragel output. Consider refactoring it somehow.\n klass = self.class\n _lex_trans_keys = klass.send :_lex_trans_keys\n _lex_key_spans = klass.send :_lex_key_spans\n _lex_index_offsets = klass.send :_lex_index_offsets\n _lex_indicies = klass.send :_lex_indicies\n _lex_trans_targs = klass.send :_lex_trans_targs\n _lex_trans_actions = klass.send :_lex_trans_actions\n _lex_to_state_actions = klass.send :_lex_to_state_actions\n _lex_from_state_actions = klass.send :_lex_from_state_actions\n _lex_eof_trans = klass.send :_lex_eof_trans\n\n pe = @source_pts.size + 2\n p, eof = @p, pe\n\n cmd_state = @command_start\n @command_start = false\n\n \n# line 10991 \"lib/parser/lexer.rb\"\nbegin\n\ttestEof = false\n\t_slen, _trans, _keys, _inds, _acts, _nacts = nil\n\t_goto_level = 0\n\t_resume = 10\n\t_eof_trans = 15\n\t_again = 20\n\t_test_eof = 30\n\t_out = 40\n\twhile true\n\tif _goto_level <= 0\n\tif p == pe\n\t\t_goto_level = _test_eof\n\t\tnext\n\tend\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _resume\n\tcase _lex_from_state_actions[ @cs] \n\twhen 97 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = p\n\t\tend\n# line 11019 \"lib/parser/lexer.rb\"\n\tend\n\t_keys = @cs << 1\n\t_inds = _lex_index_offsets[ @cs]\n\t_slen = _lex_key_spans[ @cs]\n\t_wide = ( (@source_pts[p] || 0))\n\t_trans = if ( _slen > 0 && \n\t\t\t_lex_trans_keys[_keys] <= _wide && \n\t\t\t_wide <= _lex_trans_keys[_keys + 1] \n\t\t ) then\n\t\t\t_lex_indicies[ _inds + _wide - _lex_trans_keys[_keys] ] \n\t\t else \n\t\t\t_lex_indicies[ _inds + _slen ]\n\t\t end\n\tend\n\tif _goto_level <= _eof_trans\n\t @cs = _lex_trans_targs[_trans]\n\tif _lex_trans_actions[_trans] != 0\n\tcase _lex_trans_actions[_trans]\n\twhen 28 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 117 then\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 29 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n\twhen 59 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n\twhen 63 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 304 then\n# line 1261 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 36 then\n# line 1542 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 38 then\n# line 1562 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 40 then\n# line 1590 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 70 then\n# line 1782 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n\twhen 343 then\n# line 1874 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 1; diag_msg = :ivar_name \t\tend\n\twhen 346 then\n# line 1875 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2; diag_msg = :cvar_name \t\tend\n\twhen 354 then\n# line 1895 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n\twhen 383 then\n# line 1984 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 302 then\n# line 2072 \"lib/parser/lexer.rl\"\n\t\tbegin\n ident_tok = tok; ident_ts = @ts; ident_te = @te; \t\tend\n\twhen 470 then\n# line 2258 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n\twhen 464 then\n# line 2259 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n\twhen 467 then\n# line 2260 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n\twhen 461 then\n# line 2261 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n\twhen 476 then\n# line 2262 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n\twhen 438 then\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n\twhen 453 then\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 446 then\n# line 2321 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 443 then\n# line 2322 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 88 then\n# line 2510 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 7 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 113 then\n# line 1094 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 773;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 5 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 109 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 108 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 115 then\n# line 1036 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 114 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 111 then\n# line 1007 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 112 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 6 then\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 4 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 142 then\n# line 1094 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 773;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 10 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 139 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 138 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 144 then\n# line 1036 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 143 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 141 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 11 then\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 9 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 167 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 166 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 169 then\n# line 1007 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 170 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 173 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 172 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 175 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 182 then\n# line 1094 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 773;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 13 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 179 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 178 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 184 then\n# line 1036 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 183 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 181 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 12 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 186 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 185 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 193 then\n# line 1094 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @command_start = true\n @cs = 773;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 15 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 189 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 188 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 195 then\n# line 1036 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n literal.extend_string(tok, @ts, @te)\n else\n message = tok.start_with?('#@@') ? :cvar_name : :ivar_name\n diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te)\n end\n end\n\t\tend\n\twhen 194 then\n# line 1020 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 191 then\n# line 1007 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 192 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 14 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 197 then\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 196 then\n# line 866 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 766;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 199 then\n# line 1007 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 200 then\n# line 1191 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tREGEXP_OPT, tok(@ts, @te - 1), @ts, @te - 1)\n p = p - 1;\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 201 then\n# line 1178 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n unknown_options = tok.scan(/[^imxouesn]/)\n if unknown_options.any?\n diagnostic :error, :regexp_options,\n { :options => unknown_options.join }\n end\n\n emit(:tREGEXP_OPT)\n @cs = 781;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 16 then\n# line 1330 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 202 then\n# line 1330 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 204 then\n# line 1343 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@@[0-9]/\n diagnostic :error, :cvar_name, { :name => tok }\n end\n\n emit(:tCVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 203 then\n# line 1353 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@[0-9]/\n diagnostic :error, :ivar_name, { :name => tok }\n end\n\n emit(:tIVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 225 then\n# line 1374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(KEYWORDS_BEGIN);\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 211 then\n# line 1382 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tIDENTIFIER)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 18 then\n# line 1386 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n @cs = 781; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 208 then\n# line 1395 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 220 then\n# line 1399 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 20 then\n# line 1405 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if version?(23)\n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, 
@ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 207 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 206 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 224 then\n# line 1374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS_BEGIN);\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 221 then\n# line 1378 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 223 then\n# line 1382 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 218 then\n# line 1386 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n @cs = 781; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 214 then\n# line 1395 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 219 then\n# line 1402 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 212 then\n# line 1415 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 217 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 19 then\n# line 1395 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit_table(PUNCTUATION)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 17 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 210 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 43 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN);\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 44 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 45 then\n\tbegin begin p = (( @te))-1; end\n emit(:tIDENTIFIER)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 22 then\n# line 1430 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n p = p - 1; @cs = 766; \tbegin\n\t\tp += 
1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 227 then\n# line 1436 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 226 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 229 then\n# line 1433 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 228 then\n# line 1436 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 21 then\n# line 1436 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 235 then\n# line 1462 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 234 then\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 233 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 245 then\n# line 1447 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 236 then\n# line 1451 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 241 then\n# line 1462 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 239 then\n# line 1465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 244 then\n# line 1468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 268 then\n# line 1528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 251 then\n# line 1546 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n if @version < 30\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n else\n diagnostic :warning, :ambiguous_regexp, nil, range(tm, tm + 1)\n end\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 257 then\n# line 1574 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 24 then\n# line 1582 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 259 then\n# line 1591 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 39 then\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 246 then\n# line 1616 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 247 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 258 then\n# line 1537 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 254 then\n# line 1563 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 256 then\n# line 1579 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 250 then\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 249 then\n# line 1607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 267 then\n# line 1616 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 25 then\n# line 1607 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 41 then\n# line 1616 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 23 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 71 then\n\tbegin begin p = (( @te))-1; end\n\n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n if @version < 30\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n else\n diagnostic :warning, :ambiguous_regexp, nil, range(tm, tm + 1)\n end\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 72 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 77 then\n\tbegin begin p = (( @te))-1; end\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\telse\n\tbegin begin p = (( @te))-1; end\nend\nend \n\t\t\tend\n\twhen 43 then\n# line 1652 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 272 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 273 then\n# line 1652 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 44 then\n# line 1652 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 42 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 84 then\n\tbegin begin p = (( @te))-1; end\n\n if @cond.active?\n emit(:kDO_COND, 'do'.freeze, @te - 2, @te)\n else\n emit(:kDO, 'do'.freeze, @te - 2, @te)\n end\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 85 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 474\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 283 then\n# line 1688 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_do(true)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 276 then\n# line 1694 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 277 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 278 then\n# line 1691 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 281 then\n# line 1694 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 287 then\n# line 1718 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 286 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 295 then\n# line 1710 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 289 then\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 293 then\n# line 1718 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 288 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 92 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 93 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 56 then\n# line 1733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 328 then\n# line 1750 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type = delimiter = tok[0].chr\n p = p - 1; \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 320 then\n# line 1757 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = @source_buffer.slice(@ts).chr, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 54 then\n# line 1764 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 341 then\n# line 1839 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1; p = p - 1;\n emit(:tSYMBEG, tok(@ts, @ts + 1), @ts, @ts + 1)\n \tbegin\n\t\t @cs = 333\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 329 then\n# line 1847 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 340 then\n# line 1855 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1, @ts + 2))\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 68 then\n# line 1869 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 351 then\n# line 1910 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 350 then\n# line 1920 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 
1)\n end\n\t\tend\n\twhen 330 then\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 51 then\n# line 2000 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1;\n\n if version?(18)\n ident = tok(@ts, @te - 2)\n\n emit((@source_buffer.slice(@ts) =~ /[A-Z]/) ? :tCONSTANT : :tIDENTIFIER,\n ident, @ts, @te - 2)\n p = p - 1; # continue as a symbol\n\n if !@static_env.nil? && @static_env.declared?(ident)\n @cs = 781;\n else\n @cs = (arg_or_cmdarg(cmd_state));\n end\n else\n emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n @cs = 766;\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 327 then\n# line 2038 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @version >= 30\n if @lambda_stack.any? && @lambda_stack.last + 1 == @paren_nest\n # To reject `->(...)` like `->...`\n emit(:tDOT3)\n else\n emit(:tBDOT3)\n end\n elsif @version >= 27\n emit(:tBDOT3)\n else\n emit(:tDOT3)\n end\n\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 48 then\n# line 2074 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te)\n p = ident_te - 1\n\n if !@static_env.nil? && @static_env.declared?(ident_tok) && @version < 25\n @cs = 446;\n else\n @cs = 505;\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 314 then\n# line 2093 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 185\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 55 then\n# line 2109 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 298 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 325 then\n# line 1733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 324 then\n# line 1740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tSTAR, '*'.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 321 then\n# line 1764 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 319 then\n# line 1770 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 348 then\n# line 1830 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 331 then\n# line 1869 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 344 then\n# line 1877 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te)\n else\n emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = @ts\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 349 then\n# line 1920 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 355 then\n# line 1926 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 322 then\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 326 then\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n emit(:tBDOT2)\n else\n emit(:tDOT2)\n end\n\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 301 then\n# line 1315 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 311 then\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 313 then\n# line 2093 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 185\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 316 then\n# line 2109 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 53 then\n# line 1770 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 72 then\n# line 1830 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 73 then\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 47 then\n# line 1315 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 52 then\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 67 then\n# line 2109 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 50 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 98 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 105 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\twhen 116 then\n\tbegin begin p = (( @te))-1; end\n\n if @version >= 27\n emit(:tPIPE, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1;\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n p -= 2\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\twhen 120 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 121 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 126 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 127 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if 
!@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 131 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 386 then\n# line 2129 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 387 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 388 then\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 392 then\n# line 2129 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 76 then\n# line 2139 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 396 then\n# line 2144 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n \tbegin\n\t\t @cs = (push_literal(tok, tok, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 395 then\n# line 2154 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 394 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 398 then\n# line 2148 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 397 then\n# line 2154 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 75 then\n# line 2154 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 543\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 431 then\n# line 2165 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tLAMBDA, '->'.freeze, @ts, @ts + 2)\n\n @lambda_stack.push @paren_nest\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 85 then\n# line 2206 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 406 then\n# line 2342 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts, nil, false, false, true))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 78 then\n# line 2360 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 427 then\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 453; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 480 then\n# line 2391 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 420 then\n# line 2400 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION);\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 425 then\n# line 2435 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tOP_ASGN, tok(@ts, @te - 1))\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 411 then\n# line 2439 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tEH, '?'.freeze)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 408 then\n# line 2458 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 410 then\n# line 2471 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tSEMI, ';'.freeze)\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 485 then\n# line 2475 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :error, :bare_backslash, nil, range(@ts, @ts + 1)\n p = p - 1;\n end\n\t\tend\n\twhen 405 then\n# line 2481 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 404 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n 
@te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 496 then\n# line 2202 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 333; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 494 then\n# line 2206 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 493 then\n# line 2217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 435 then\n# line 2292 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 482 then\n# line 2352 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 424 then\n# line 2360 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 328\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 432 then\n# line 2367 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 453; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 488 then\n# line 1315 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 430 then\n# line 2391 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 426 then\n# line 2400 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION);\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 419 then\n# line 2406 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 433 then\n# line 2458 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 417 then\n# line 2465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 423 then\n# line 2481 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 83 then\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 79 then\n# line 2292 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 82 then\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 77 then\n# line 2481 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 80 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 144 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n if tok == '{'.freeze\n @paren_nest += 1\n end\n @command_start = true\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 145 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 333; 
\tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 146 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 148 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 149 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 150 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 151 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg(cmd_state));\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 152 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 153 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 155 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 156 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 157 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 159 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 163 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 164 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 166 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(PUNCTUATION);\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 167 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 95 then\n# line 2511 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 501 then\n# line 2514 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 504 then\n# line 2497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version < 27\n # Ruby before 2.7 doesn't support comments before leading dot.\n # If a line after \"a\" starts with a comment then \"a\" is a self-contained statement.\n # So in that case we emit a special tNL token and start reading the\n # next line as a separate statement.\n #\n # Note: block comments before leading dot are not supported on any version of Ruby.\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 503 then\n# line 2514 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 90 then\n# line 2497 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n if @version < 27\n # Ruby before 2.7 doesn't support comments before leading dot.\n # If a line after \"a\" starts with a comment then \"a\" is a self-contained statement.\n # So in that case we emit a special tNL token and start reading the\n # next line as a separate statement.\n #\n # Note: block comments before leading dot are not supported on any version of Ruby.\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 86 then\n# line 2514 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 91 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 180 then\n\tbegin begin p = (( @te))-1; end\n\n if @version < 27\n # Ruby before 2.7 doesn't support comments before leading dot.\n # If a line after \"a\" starts with a comment then \"a\" is a self-contained statement.\n # So in that case we emit a special tNL token and start reading the\n # next line as a separate statement.\n #\n # Note: block comments before leading dot are not supported on any version of Ruby.\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 182 then\n\tbegin begin p = (( @te))-1; end\n emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 185; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 507 then\n# line 2524 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; 
begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = (@cs_before_block_comment)\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 506 then\n# line 2532 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :embedded_document, nil,\n range(@eq_begin_s, @eq_begin_s + '=begin'.length)\n end\n\t\tend\n\twhen 106 then\n# line 2542 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 967\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 2 then\n# line 2546 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 98 then\n# line 2549 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin cmd_state = true; p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 99 then\n# line 527 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 100 then\n# line 2539 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 105 then\n# line 2542 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 967\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 104 then\n# line 2549 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin cmd_state = true; p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 1 then\n# line 2549 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin cmd_state = true; p = p - 1; \tbegin\n\t\t @cs = 773\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 66 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 110 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 140 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 168 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 174 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 180 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 187 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 190 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 198 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 269 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1528 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 260 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1591 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 252 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 352 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1910 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 315 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2093 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 185\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 434 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2447 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if @paren_nest == 0\n diagnostic :warning, :triple_dot_at_eol, nil, range(@ts, @te - 1)\n end\n\n emit(:tDOT3, '...'.freeze, @ts, @te - 1)\n p = p - 1;\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 508 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a 
newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2524 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = (@cs_before_block_comment)\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 505 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2529 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 107 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2542 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 967\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 3 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2546 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 456 then\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 454 then\n# line 636 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, chars)) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 459 then\n# line 637 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 457 then\n# line 638 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 2); p -= 2 } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 455 then\n# line 639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 2); p -= 2 } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 458 then\n# line 640 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 6); p -= 6 } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 447 then\n# line 644 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Float(chars))) } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 448 then\n# line 645 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars), @ts, @te - 2); p -= 2 } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 449 then\n# line 649 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 451 then\n# line 650 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 450 then\n# line 651 
\"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars), @ts, @te - 6); p -= 6 } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 134 then\n# line 666 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 162 then\n# line 666 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 373 then\n# line 666 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 118 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 146 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 357 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 121 then\n# line 713 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 149 then\n# line 713 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 360 then\n# line 713 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 123 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 151 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 362 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 120 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 148 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 359 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 137 then\n# line 751 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 165 then\n# line 751 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 376 then\n# line 751 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 136 then\n# line 755 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 164 then\n# line 755 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 375 then\n# line 755 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 131 then\n# line 761 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 159 then\n# line 761 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 370 then\n# line 761 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 130 then\n# line 765 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 158 then\n# line 765 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 369 then\n# line 765 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 135 then\n# line 771 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 163 then\n# line 771 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 374 then\n# line 771 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 132 then\n# line 785 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 160 then\n# line 785 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 371 then\n# line 785 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 119 then\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 147 then\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 358 then\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 171 then\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 176 then\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 57 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 30 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1542 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 32 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1562 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 34 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1590 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 213 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every 
heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1415 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 232 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1433 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 240 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 33 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 271 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 263 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1613 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 282 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1691 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 294 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 290 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1715 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 58 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n 
end\n\t\tend\n\twhen 49 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2074 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te)\n p = ident_te - 1\n\n if !@static_env.nil? && @static_env.declared?(ident_tok) && @version < 25\n @cs = 446;\n else\n @cs = 505;\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 312 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 393 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 389 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2120 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @in_kwarg\n p = p - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n \tbegin\n\t\t @cs = 185\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 402 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2148 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 399 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2151 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 185\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 486 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 418 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2468 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 961\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 101 then\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2539 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 262 then\n# line 
1059 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1509 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze, @te - 1, @te)\n else\n emit(:tLCURLY, '{'.freeze, @te - 1, @te)\n end\n @command_start = true\n @paren_nest += 1\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 284 then\n# line 1059 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1675 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE_ARG, '{'.freeze)\n end\n @paren_nest += 1\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 384 then\n# line 1059 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1955 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n @command_start = true\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE, '{'.freeze)\n end\n @paren_nest += 1\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 499 then\n# line 1059 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 2173 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n if tok == '{'.freeze\n @paren_nest += 1\n end\n @command_start = true\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 500 then\n# line 1068 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n current_literal = literal\n if current_literal\n if current_literal.end_interp_brace_and_try_closing\n if version?(18, 19)\n emit(:tRCURLY, '}'.freeze, p - 1, p)\n @cond.lexpop\n @cmdarg.lexpop\n else\n emit(:tSTRING_DEND, '}'.freeze, p - 1, p)\n end\n\n if current_literal.saved_herebody_s\n @herebody_s = current_literal.saved_herebody_s\n end\n\n\n p = p - 1;\n @cs = (next_state_for_literal(current_literal));\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\n @paren_nest -= 1\n \t\tend\n# line 2410 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 781;\n else\n @cs = 511;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 60 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == 
pe ? p - 2 : p) \t\tend\n\twhen 64 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 216 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1415 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 231 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1433 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 243 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 265 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1610 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 280 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1691 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 292 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 318 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 391 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 401 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2148 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 422 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 103 then\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2539 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 237 then\n# line 1261 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1455 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg(cmd_state)); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 332 then\n# line 1261 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 303 then\n# line 1261 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 415 then\n# line 1261 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 144 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n if tok == '{'.freeze\n @paren_nest += 1\n end\n @command_start = true\n\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 145 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 333; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 146 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 148 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 149 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 150 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 151 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg(cmd_state));\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 152 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 153 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 155 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 156 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 157 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 159 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 163 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 164 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 166 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(PUNCTUATION);\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 167 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 238 then\n# line 1262 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1455 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg(cmd_state)); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 333 then\n# line 1262 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 305 then\n# line 1262 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 416 then\n# line 1262 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 474; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 334 then\n# line 1267 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 306 then\n# line 1267 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 335 then\n# line 1268 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 307 then\n# line 1268 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 339 then\n# line 1269 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 310 then\n# line 1269 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 338 then\n# line 1270 
\"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 309 then\n# line 1270 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 98 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 105 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n\twhen 116 then\n\tbegin begin p = (( @te))-1; end\n\n if @version >= 27\n emit(:tPIPE, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1;\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n p -= 2\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\twhen 120 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 121 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @command_start = true\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 126 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 127 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 446; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg(cmd_state)); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 131 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 336 then\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 308 then\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 337 then\n# line 1276 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1861 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 481 then\n# line 1281 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2356 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT, tok(@ts, tm), @ts, tm)\n p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 261 then\n# line 1287 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1503 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze, @te - 1, @te)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 377 then\n# line 1287 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1969 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 484 then\n# line 1287 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 2443 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK2, '['.freeze)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 487 then\n# line 1293 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2410 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 781;\n else\n @cs = 511;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 253 then\n# line 1300 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1484 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if version?(18)\n emit(:tLPAREN2, '('.freeze, @te - 1, @te)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n @cs = 543; \tbegin\n\t\tp 
+= 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 266 then\n# line 1300 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN2, '('.freeze)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 274 then\n# line 1300 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1629 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n if version?(18)\n @cs = 773; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 323 then\n# line 1300 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 1974 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN, '('.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 428 then\n# line 1300 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n\n if version?(18)\n @command_start = true\n end\n \t\tend\n# line 2406 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 429 then\n# line 1310 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2410 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 781;\n else\n @cs = 511;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 71 then\n# line 1782 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 347 then\n# line 1783 \"lib/parser/lexer.rl\"\n\t\tbegin\n new_herebody_s = p \t\tend\n# line 1784 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n tok(@ts, heredoc_e) =~ /^<<(-?)(~?)([\"'`]?)(.*)\\3$/m\n\n indent = !$1.empty? || !$2.empty?\n dedent_body = !$2.empty?\n type = $3.empty? ? 
'<<\"'.freeze : ('<<'.freeze + $3)\n delimiter = $4\n\n if @version >= 27\n if delimiter.count(\"\\n\") > 0 || delimiter.count(\"\\r\") > 0\n diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1)\n end\n elsif @version >= 24\n if delimiter.count(\"\\n\") > 0\n if delimiter.end_with?(\"\\n\")\n diagnostic :warning, :heredoc_id_ends_with_nl, nil, range(@ts, @ts + 1)\n delimiter = delimiter.rstrip\n else\n diagnostic :fatal, :heredoc_id_has_newline, nil, range(@ts, @ts + 1)\n end\n end\n end\n\n if dedent_body && version?(18, 19, 20, 21, 22)\n emit(:tLSHFT, '<<'.freeze, @ts, @ts + 2)\n p = @ts + 1\n @cs = 543; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (push_literal(type, delimiter, @ts, heredoc_e, indent, dedent_body));\n\n @herebody_s ||= new_herebody_s\n p = @herebody_s - 1\n end\n end\n\t\tend\n\twhen 342 then\n# line 1874 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 1; diag_msg = :ivar_name \t\tend\n# line 1877 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te)\n else\n emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = @ts\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 345 then\n# line 1875 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2; diag_msg = :cvar_name \t\tend\n# line 1877 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @version >= 27\n diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te)\n else\n emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = @ts\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 353 then\n# line 1895 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 380 then\n# line 1984 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1985 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 519; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 471 then\n# line 2258 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 465 then\n# line 2259 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 468 then\n# line 2260 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 462 then\n# line 2261 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 477 then\n# line 2262 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 440 then\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 478 
then\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 89 then\n# line 2510 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2511 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 8 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 444 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2322 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 222 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 43;\t\tend\n\twhen 209 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1378 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 44;\t\tend\n\twhen 205 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1382 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 45;\t\tend\n\twhen 26 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1546 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 71;\t\tend\n\twhen 255 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1563 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 72;\t\tend\n\twhen 27 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 77;\t\tend\n\twhen 248 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 78;\t\tend\n\twhen 275 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 84;\t\tend\n\twhen 45 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1652 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 85;\t\tend\n\twhen 296 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1706 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 92;\t\tend\n\twhen 285 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1710 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 93;\t\tend\n\twhen 69 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1830 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 105;\t\tend\n\twhen 385 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1938 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 116;\t\tend\n\twhen 299 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 120;\t\tend\n\twhen 379 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1985 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 121;\t\tend\n\twhen 378 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1991 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 122;\t\tend\n\twhen 74 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 126;\t\tend\n\twhen 297 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1315 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 127;\t\tend\n\twhen 300 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2109 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 131;\t\tend\n\twhen 495 then\n# line 1 
\"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2173 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 144;\t\tend\n\twhen 490 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2202 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 145;\t\tend\n\twhen 498 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2212 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 147;\t\tend\n\twhen 491 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 148;\t\tend\n\twhen 492 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2222 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 149;\t\tend\n\twhen 497 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2226 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 150;\t\tend\n\twhen 489 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2237 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 151;\t\tend\n\twhen 483 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2251 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 152;\t\tend\n\twhen 409 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 153;\t\tend\n\twhen 442 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2309 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 156;\t\tend\n\twhen 81 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 157;\t\tend\n\twhen 412 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2352 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 159;\t\tend\n\twhen 403 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1315 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 163;\t\tend\n\twhen 414 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 164;\t\tend\n\twhen 407 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2400 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 166;\t\tend\n\twhen 413 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2406 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 167;\t\tend\n\twhen 87 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 180;\t\tend\n\twhen 502 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2514 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 182;\t\tend\n\twhen 177 then\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 945 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 124 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 152 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 363 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 129 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 157 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 368 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 122 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 150 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 361 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 128 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 156 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 367 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 126 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 154 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 365 then\n# line 740 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 133 then\n# line 771 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 785 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 161 then\n# line 771 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 785 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 372 then\n# line 771 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 785 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 116 then\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 145 then\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 356 then\n# line 817 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 811 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 61 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 215 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1415 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 230 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1433 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 242 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 264 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1610 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 781\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 279 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1691 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 291 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 317 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2090 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 390 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 400 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2148 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 421 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 102 then\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2539 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 473 then\n# line 2262 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 437 then\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 452 then\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 445 then\n# line 2321 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 441 then\n# line 2322 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 643 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2324 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 270 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 501 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1607 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 78;\t\tend\n\twhen 35 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n 
\t\tend\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 77;\t\tend\n\twhen 46 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1652 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 85;\t\tend\n\twhen 94 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 180;\t\tend\n\twhen 65 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 98;\t\tend\n\twhen 84 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2206 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 146;\t\tend\n\twhen 93 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 180;\t\tend\n\twhen 37 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1562 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1563 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 72;\t\tend\n\twhen 382 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1984 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2063 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 126;\t\tend\n\twhen 381 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1984 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1315 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 127;\t\tend\n\twhen 474 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2262 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 153;\t\tend\n\twhen 127 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 155 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 366 then\n# line 706 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 125 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 153 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 890 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 364 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = @source_buffer.slice(p - 1).chr\n\n if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord))\n diagnostic :fatal, :invalid_escape\n end\n \t\tend\n# line 725 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1897 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 781; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 469 then\n# line 2258 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 463 then\n# line 2259 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 466 then\n# line 2260 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 460 then\n# line 2261 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 472 then\n# line 2262 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 436 then\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2265 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 31 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 857 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1562 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1563 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 72;\t\tend\n\twhen 62 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 98;\t\tend\n\twhen 92 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1220 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2497 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 180;\t\tend\n\twhen 479 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2297 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 155;\t\tend\n\twhen 475 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2262 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2297 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 155;\t\tend\n\twhen 439 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2264 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2297 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 155;\t\tend\n# line 23534 \"lib/parser/lexer.rb\"\n\tend\n\tend\n\tend\n\tif _goto_level <= _again\n\tcase _lex_to_state_actions[ @cs] \n\twhen 96 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = nil;\t\tend\n# line 23544 \"lib/parser/lexer.rb\"\n\tend\n\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tp += 1\n\tif p != pe\n\t\t_goto_level = _resume\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _test_eof\n\tif p == eof\n\tif _lex_eof_trans[ @cs] > 0\n\t\t_trans = _lex_eof_trans[ @cs] - 1;\n\t\t_goto_level = _eof_trans\n\t\tnext;\n\tend\n\tend\n\n\tend\n\tif _goto_level <= _out\n\t\tbreak\n\tend\nend\n\tend\n\n# line 286 \"lib/parser/lexer.rl\"\n # %\n\n # Ragel creates a local variable called `testEof` but it doesn't use\n # it in any assignment. This dead code is here to swallow the warning.\n # It has no runtime cost because Ruby doesn't produce any instructions from it.\n if false\n testEof\n end\n\n @p = p\n\n if @token_queue.any?\n @token_queue.shift\n elsif @cs == klass.lex_error\n [ false, [ '$error'.freeze, range(p - 1, p) ] ]\n else\n eof = @source_pts.size\n [ false, [ '$eof'.freeze, range(eof, eof) ] ]\n end\n end",
"def advance\n if @token_queue.any?\n return @token_queue.shift\n end\n\n # Ugly, but dependent on Ragel output. Consider refactoring it somehow.\n klass = self.class\n _lex_trans_keys = klass.send :_lex_trans_keys\n _lex_key_spans = klass.send :_lex_key_spans\n _lex_index_offsets = klass.send :_lex_index_offsets\n _lex_indicies = klass.send :_lex_indicies\n _lex_trans_targs = klass.send :_lex_trans_targs\n _lex_trans_actions = klass.send :_lex_trans_actions\n _lex_to_state_actions = klass.send :_lex_to_state_actions\n _lex_from_state_actions = klass.send :_lex_from_state_actions\n _lex_eof_trans = klass.send :_lex_eof_trans\n\n pe = @source_pts.size + 2\n p, eof = @p, pe\n\n @command_state = (@cs == klass.lex_en_expr_value ||\n @cs == klass.lex_en_line_begin)\n\n \n# line 10834 \"lib/parser/lexer.rb\"\nbegin\n\ttestEof = false\n\t_slen, _trans, _keys, _inds, _acts, _nacts = nil\n\t_goto_level = 0\n\t_resume = 10\n\t_eof_trans = 15\n\t_again = 20\n\t_test_eof = 30\n\t_out = 40\n\twhile true\n\tif _goto_level <= 0\n\tif p == pe\n\t\t_goto_level = _test_eof\n\t\tnext\n\tend\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _resume\n\tcase _lex_from_state_actions[ @cs] \n\twhen 89 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = p\n\t\tend\n# line 10862 \"lib/parser/lexer.rb\"\n\tend\n\t_keys = @cs << 1\n\t_inds = _lex_index_offsets[ @cs]\n\t_slen = _lex_key_spans[ @cs]\n\t_wide = ( (@source_pts[p] || 0))\n\t_trans = if ( _slen > 0 && \n\t\t\t_lex_trans_keys[_keys] <= _wide && \n\t\t\t_wide <= _lex_trans_keys[_keys + 1] \n\t\t ) then\n\t\t\t_lex_indicies[ _inds + _wide - _lex_trans_keys[_keys] ] \n\t\t else \n\t\t\t_lex_indicies[ _inds + _slen ]\n\t\t end\n\tend\n\tif _goto_level <= _eof_trans\n\t @cs = _lex_trans_targs[_trans]\n\tif _lex_trans_actions[_trans] != 0\n\tcase _lex_trans_actions[_trans]\n\twhen 28 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 108 then\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 29 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n\twhen 59 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n\twhen 63 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n\twhen 292 then\n# line 1227 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 36 then\n# line 1496 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 38 then\n# line 1512 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 40 then\n# line 1540 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 69 then\n# line 1730 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n\twhen 334 then\n# line 1810 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n\twhen 363 then\n# line 1879 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 290 then\n# line 1934 \"lib/parser/lexer.rl\"\n\t\tbegin\n ident_tok = tok; ident_ts = @ts; ident_te = @te; \t\tend\n\twhen 449 then\n# line 2115 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n\twhen 443 then\n# line 2116 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n\twhen 446 then\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n\twhen 440 then\n# line 2118 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n\twhen 455 then\n# line 2119 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n\twhen 417 then\n# line 2120 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n\twhen 432 then\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 425 then\n# line 2178 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 422 then\n# line 2179 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 85 then\n# line 2337 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 7 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 105 then\n# line 1065 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @cs = 759;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 5 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 101 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 100 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 106 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 103 then\n# line 989 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 104 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 6 then\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 4 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 133 then\n# line 1065 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @cs = 759;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 10 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 130 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 129 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 134 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 132 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 11 then\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 9 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 157 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 156 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 159 then\n# line 989 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 160 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 163 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 162 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 165 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 172 then\n# line 1065 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @cs = 759;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 13 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 169 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 168 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 173 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 171 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 12 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 175 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 174 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 182 then\n# line 1065 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DBEG, '#{'.freeze)\n\n if current_literal.heredoc?\n current_literal.saved_herebody_s = @herebody_s\n @herebody_s = nil\n end\n\n current_literal.start_interp_brace\n @cs = 759;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 15 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 178 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 177 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 183 then\n# line 1002 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n current_literal.flush_string\n current_literal.extend_content\n\n emit(:tSTRING_DVAR, nil, @ts, @ts + 1)\n\n p = @ts\n \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 180 then\n# line 989 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 181 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? &&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 14 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 185 then\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 184 then\n# line 848 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n string = tok\n\n # tLABEL_END is only possible in non-cond context on >= 2.2\n if @version >= 22 && !@cond.active?\n lookahead = @source_buffer.slice(@te...@te+2)\n end\n\n current_literal = literal\n if !current_literal.heredoc? 
&&\n (token = current_literal.nest_and_try_closing(string, @ts, @te, lookahead))\n if token[0] == :tLABEL_END\n p += 1\n pop_literal\n @cs = 752;\n else\n @cs = (pop_literal);\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n current_literal.extend_string(string, @ts, @te)\n end\n end\n\t\tend\n\twhen 187 then\n# line 989 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n literal.extend_space @ts, @te\n end\n\t\tend\n\twhen 188 then\n# line 1157 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tREGEXP_OPT, tok(@ts, @te - 1), @ts, @te - 1)\n p = p - 1;\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 189 then\n# line 1144 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n unknown_options = tok.scan(/[^imxouesn]/)\n if unknown_options.any?\n diagnostic :error, :regexp_options,\n { :options => unknown_options.join }\n end\n\n emit(:tREGEXP_OPT)\n @cs = 767;\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 16 then\n# line 1286 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 190 then\n# line 1286 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^\\$([1-9][0-9]*)$/\n emit(:tNTH_REF, tok(@ts + 1).to_i)\n elsif tok =~ /^\\$([&`'+])$/\n emit(:tBACK_REF)\n else\n emit(:tGVAR)\n end\n\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 192 then\n# line 1299 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@@[0-9]/\n diagnostic :error, :cvar_name, { :name => tok }\n end\n\n emit(:tCVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 191 then\n# line 1309 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tok =~ /^@[0-9]/\n diagnostic :error, :ivar_name, { :name => tok }\n end\n\n emit(:tIVAR)\n @cs = (stack_pop); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 213 then\n# line 1330 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(KEYWORDS_BEGIN);\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 199 then\n# line 1338 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tIDENTIFIER)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 18 then\n# line 1342 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n @cs = 767; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 196 then\n# line 1351 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 208 then\n# line 1355 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 20 then\n# line 1361 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if version?(23)\n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, 
@ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 195 then\n# line 1374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 194 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 212 then\n# line 1330 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS_BEGIN);\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 209 then\n# line 1334 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 211 then\n# line 1338 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 206 then\n# line 1342 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n @cs = 767; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 202 then\n# line 1351 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 207 then\n# line 1358 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 200 then\n# line 1371 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 205 then\n# line 1374 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 19 then\n# line 1351 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit_table(PUNCTUATION)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 17 then\n# line 1374 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 198 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 39 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN);\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 40 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 41 then\n\tbegin begin p = (( @te))-1; end\n emit(:tIDENTIFIER)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 22 then\n# line 1386 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n p = p - 1; @cs = 752; \tbegin\n\t\tp += 
1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 215 then\n# line 1392 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 214 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 217 then\n# line 1389 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 216 then\n# line 1392 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 21 then\n# line 1392 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 223 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 222 then\n# line 1424 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 221 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 233 then\n# line 1403 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 224 then\n# line 1407 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tIDENTIFIER)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 229 then\n# line 1418 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 227 then\n# line 1421 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 232 then\n# line 1424 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 256 then\n# line 1482 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 239 then\n# line 1500 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 245 then\n# line 1524 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 24 then\n# line 1532 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 247 then\n# line 1541 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 39 then\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 234 then\n# line 1566 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 235 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 246 then\n# line 1491 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 242 then\n# line 1513 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 244 then\n# line 1529 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 238 then\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 237 then\n# line 1557 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 255 then\n# line 1566 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 25 then\n# line 1557 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 41 then\n# line 1566 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 23 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 67 then\n\tbegin begin p = (( @te))-1; end\n\n if tok(tm, tm + 1) == '/'.freeze\n # Ambiguous regexp literal.\n diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1)\n end\n\n p = tm - 1\n \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 68 then\n\tbegin begin p = (( @te))-1; end\n\n diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) },\n range(tm, @te)\n\n p = tm - 1\n \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\twhen 73 then\n\tbegin begin p = (( @te))-1; end\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\telse\n\tbegin begin p = (( @te))-1; end\nend\nend \n\t\t\tend\n\twhen 43 then\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 466\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 260 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 261 then\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 466\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 44 then\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 466\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 42 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 80 then\n\tbegin begin p = (( @te))-1; end\n\n if @cond.active?\n emit(:kDO_COND, 'do'.freeze, @te - 2, @te)\n else\n emit(:kDO, 'do'.freeze, @te - 2, @te)\n end\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 81 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 466\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 271 then\n# line 1636 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_do(true)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 264 then\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 265 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 266 then\n# line 1639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 269 then\n# line 1642 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 275 then\n# line 1666 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 274 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 283 then\n# line 1658 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 277 then\n# line 1660 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 281 then\n# line 1666 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 276 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 88 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 89 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 56 then\n# line 1681 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 314 then\n# line 1698 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type = delimiter = tok[0].chr\n p = p - 1; \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 308 then\n# line 1705 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = @source_buffer.slice(@ts).chr, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 54 then\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 327 then\n# line 1768 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1; p = p - 1;\n emit(:tSYMBEG, tok(@ts, @ts + 1), @ts, @ts + 1)\n \tbegin\n\t\t @cs = 325\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 315 then\n# line 1776 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 326 then\n# line 1784 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1, @ts + 2))\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 68 then\n# line 1798 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 331 then\n# line 1825 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 330 then\n# line 1835 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 
1)\n end\n\t\tend\n\twhen 316 then\n# line 1874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 51 then\n# line 1894 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = p - 1;\n\n if version?(18)\n ident = tok(@ts, @te - 2)\n\n emit((@source_buffer.slice(@ts) =~ /[A-Z]/) ? :tCONSTANT : :tIDENTIFIER,\n ident, @ts, @te - 2)\n p = p - 1; # continue as a symbol\n\n if !@static_env.nil? && @static_env.declared?(ident)\n @cs = 767;\n else\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1)\n @cs = 752;\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 48 then\n# line 1936 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te)\n p = ident_te - 1\n\n if !@static_env.nil? && @static_env.declared?(ident_tok) && @version < 25\n @cs = 438;\n else\n @cs = 497;\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 302 then\n# line 1955 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 181\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 55 then\n# line 1971 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 286 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 313 then\n# line 1681 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 312 then\n# line 1688 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tSTAR, '*'.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 309 then\n# line 1712 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n type, delimiter = tok[0..-2], tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 307 then\n# line 1718 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 317 then\n# line 1798 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1), @ts)\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 329 then\n# line 1835 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 335 then\n# line 1841 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 310 then\n# line 1874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 289 then\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 299 then\n# line 1952 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 301 then\n# line 1955 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 181\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 304 then\n# line 1971 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 53 then\n# line 1718 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1)\n end\n\t\tend\n\twhen 71 then\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 47 then\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? 
&& @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 52 then\n# line 1952 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n\t\tend\n\twhen 67 then\n# line 1971 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = @ts - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 50 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 113 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 114 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 511; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 115 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 117 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 118 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 365 then\n# line 1991 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 366 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 367 then\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 371 then\n# line 1991 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 74 then\n# line 2001 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 375 then\n# line 2006 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n \tbegin\n\t\t @cs = (push_literal(tok, tok, @ts))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 374 then\n# line 2016 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 373 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 377 then\n# line 2010 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 376 then\n# line 2016 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 73 then\n# line 2016 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 535\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 411 then\n# line 2027 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tLAMBDA, '->'.freeze, @ts, @ts + 2)\n\n @lambda_stack.push @paren_nest\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 83 then\n# line 2064 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 385 then\n# line 2199 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n type, delimiter = tok, tok[-1].chr\n \tbegin\n\t\t @cs = (push_literal(type, delimiter, @ts, nil, false, false, true))\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 76 then\n# line 2217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 407 then\n# line 2224 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 445; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 459 then\n# line 2248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION)\n \tbegin\n\t\t @cs = 759\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 400 then\n# line 2257 
\"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION);\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 392 then\n# line 2267 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 767;\n else\n @cs = 503;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 405 then\n# line 2292 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tOP_ASGN, tok(@ts, @te - 1))\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 390 then\n# line 2296 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tEH, '?'.freeze)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 387 then\n# line 2304 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit_table(PUNCTUATION)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 389 then\n# line 2317 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tSEMI, ';'.freeze)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 464 then\n# line 2320 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :error, :bare_backslash, nil, range(@ts, @ts + 1)\n p = p - 1;\n end\n\t\tend\n\twhen 384 then\n# line 2326 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 383 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 474 then\n# line 2060 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 325; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 472 then\n# line 2064 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 471 then\n# line 2075 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(KEYWORDS)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 414 then\n# line 2149 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 461 then\n# line 2209 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 404 then\n# line 2217 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1; \tbegin\n\t\t @stack[ @top] = @cs\n\t\t @top+= 1\n\t\t @cs = 320\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 412 then\n# line 2224 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 445; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 466 then\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 410 then\n# line 2248 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n \tbegin\n\t\t @cs = 759\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 406 then\n# line 2257 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION);\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 399 then\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 413 then\n# line 2304 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 397 then\n# line 2311 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 403 then\n# line 2326 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 81 then\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 77 then\n# line 2149 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :error, :no_dot_digit_literal\n end\n\t\tend\n\twhen 80 then\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 75 then\n# line 2326 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin \n diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] }\n end\n\t\tend\n\twhen 78 then\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 135 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 136 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 325; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 137 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 138 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 139 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 140 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 511; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 141 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 142 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 143 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 144 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? 
'_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 146 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 148 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 150 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 154 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 155 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 157 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(PUNCTUATION);\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 158 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 87 then\n# line 2338 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 479 then\n# line 2341 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 181; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 480 then\n# line 2341 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 181; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 84 then\n# line 2341 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin emit(:tNL, nil, @newline_s, @newline_s + 1)\n p = p - 1; @cs = 181; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 483 then\n# line 2351 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = (@cs_before_block_comment)\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 482 then\n# line 2359 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n diagnostic :fatal, :embedded_document, nil,\n range(@eq_begin_s, @eq_begin_s + '=begin'.length)\n end\n\t\tend\n\twhen 98 then\n# line 2369 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 948\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 2 then\n# line 2373 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 90 then\n# line 2376 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = p - 1; \tbegin\n\t\t @cs = 759\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 91 then\n# line 517 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Sit at EOF indefinitely. #advance would return $eof each time.\n # This allows to feed the lexer more data if needed; this is only used\n # in tests.\n #\n # Note that this action is not embedded into e_eof like e_heredoc_nl and e_bs\n # below. 
This is due to the fact that scanner state at EOF is observed\n # by tests, and encapsulating it in a rule would break the introspection.\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 92 then\n# line 2366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 97 then\n# line 2369 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 948\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 96 then\n# line 2376 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 759\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 1 then\n# line 2376 \"lib/parser/lexer.rl\"\n\t\tbegin\n begin p = (( @te))-1; end\n begin p = p - 1; \tbegin\n\t\t @cs = 759\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 66 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 102 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 131 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 158 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 164 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 170 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 176 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. 
@herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 179 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 186 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n\twhen 257 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1482 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n # Unlike expr_beg as invoked in the next rule, do not warn\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 248 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1541 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 240 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 332 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1825 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n escape = { \" \" => '\\s', \"\\r\" => '\\r', \"\\n\" => '\\n', \"\\t\" => '\\t',\n \"\\v\" => '\\v', \"\\f\" => '\\f' }[@source_buffer.slice(@ts + 1)]\n diagnostic :warning, :invalid_escape_use, { :escape => escape }, range\n\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 303 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1955 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n @cs_before_block_comment = @cs\n \tbegin\n\t\t @cs = 181\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 484 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2351 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit_comment(@eq_begin_s, @te)\n \tbegin\n\t\t @cs = (@cs_before_block_comment)\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 481 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into 
c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2356 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n\t\tend\n\twhen 99 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2369 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin @eq_begin_s = @ts\n \tbegin\n\t\t @cs = 948\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 3 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 2373 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = pe - 3 end\n\t\tend\n\twhen 435 then\n# line 625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 433 then\n# line 626 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, chars)) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 438 then\n# line 627 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 436 then\n# line 628 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 2); p -= 2 } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 434 then\n# line 629 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 2); p -= 2 } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 437 then\n# line 630 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars, @ts, @te - 6); p -= 6 } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 426 then\n# line 634 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Float(chars))) } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 427 then\n# line 635 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars), @ts, @te - 2); p -= 2 } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 428 then\n# line 639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tRATIONAL, Rational(chars)) } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 430 then\n# line 640 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tIMAGINARY, Complex(0, Rational(chars))) } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 429 then\n# line 641 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars), @ts, @te - 6); p -= 6 } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 125 then\n# line 656 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || 
codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 152 then\n# line 656 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 353 then\n# line 656 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = \"\"\n\n codepoints = tok(@escape_s + 2, p - 1)\n codepoint_s = @escape_s + 2\n\n if @version < 24\n if codepoints.start_with?(\" \") || codepoints.start_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(@escape_s + 2, @escape_s + 3)\n end\n\n if spaces_p = codepoints.index(/[ \\t]{2}/)\n diagnostic :fatal, :invalid_unicode_escape, nil,\n range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2)\n end\n\n if codepoints.end_with?(\" \") || codepoints.end_with?(\"\\t\")\n diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p)\n end\n end\n\n codepoints.scan(/([0-9a-fA-F]+)|([ \\t]+)/).each do |(codepoint_str, spaces)|\n if spaces\n codepoint_s += spaces.length\n else\n codepoint = codepoint_str.to_i(16)\n\n if codepoint >= 0x110000\n diagnostic :error, :unicode_point_too_large, nil,\n range(codepoint_s, codepoint_s + codepoint_str.length)\n break\n end\n\n @escape += codepoint.chr(Encoding::UTF_8)\n codepoint_s += codepoint_str.length\n end\n end\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 109 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 136 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 337 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 112 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 139 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 340 then\n# line 703 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_escape\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 114 then\n# line 722 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 141 then\n# line 722 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 342 then\n# line 722 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 111 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 138 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 339 then\n# line 729 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 128 then\n# line 733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 155 then\n# line 733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 356 then\n# line 733 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 127 then\n# line 737 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 154 then\n# line 737 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 355 then\n# line 737 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 122 then\n# line 743 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 149 then\n# line 743 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 350 then\n# line 743 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 121 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 148 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? 
&& escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 349 then\n# line 747 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 126 then\n# line 753 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 153 then\n# line 753 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 354 then\n# line 753 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 123 then\n# line 767 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 150 then\n# line 767 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 351 then\n# line 767 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 110 then\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. 
The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 137 then\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 338 then\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 161 then\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 166 then\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 57 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 30 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1496 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 32 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1512 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 34 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1540 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n\twhen 201 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every 
heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1371 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 220 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1389 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 228 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1421 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 33 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 259 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1557 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 251 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1563 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 270 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 282 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1660 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 278 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1663 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 58 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1681 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n 
end\n\t\tend\n\twhen 49 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1936 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te)\n p = ident_te - 1\n\n if !@static_env.nil? && @static_env.declared?(ident_tok) && @version < 25\n @cs = 438;\n else\n @cs = 497;\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 300 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1952 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 372 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 368 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1982 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @in_kwarg\n p = p - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n else\n \tbegin\n\t\t @cs = 181\n\t\t_goto_level = _again\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 381 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2010 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 378 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2013 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 181\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 465 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2311 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 398 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2314 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 945\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 93 then\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 2366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 250 then\n# line 
1027 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1465 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze, @te - 1, @te)\n else\n emit(:tLCURLY, '{'.freeze, @te - 1, @te)\n end\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 272 then\n# line 1027 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1625 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE_ARG, '{'.freeze)\n end\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 364 then\n# line 1027 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 1852 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n emit(:tLAMBEG, '{'.freeze)\n else\n emit(:tLBRACE, '{'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 477 then\n# line 1027 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n current_literal = literal\n if current_literal\n current_literal.start_interp_brace\n end\n \t\tend\n# line 2035 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 478 then\n# line 1036 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n current_literal = literal\n if current_literal\n if current_literal.end_interp_brace_and_try_closing\n if version?(18, 19)\n emit(:tRCURLY, '}'.freeze, p - 1, p)\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n else\n emit(:tSTRING_DEND, '}'.freeze, p - 1, p)\n end\n\n if current_literal.saved_herebody_s\n @herebody_s = current_literal.saved_herebody_s\n end\n\n\n p = p - 1;\n @cs = (next_state_for_literal(current_literal));\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n \t\tend\n# line 2267 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 767;\n else\n @cs = 503;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 60 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n\twhen 64 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 204 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1371 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 219 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1389 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 231 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1421 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 253 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1560 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 268 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 280 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1660 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 306 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1952 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 370 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 380 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2010 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 402 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2311 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 95 then\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 225 then\n# line 1227 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1411 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 318 then\n# line 1227 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 291 then\n# line 1227 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 395 then\n# line 1227 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 135 then\n\tbegin begin p = (( @te))-1; end\n\n if @lambda_stack.last == @paren_nest\n @lambda_stack.pop\n\n if tok == '{'.freeze\n emit(:tLAMBEG, '{'.freeze)\n else # 'do'\n emit(:kDO_LAMBDA, 'do'.freeze)\n end\n else\n if tok == '{'.freeze\n emit(:tLCURLY, '{'.freeze)\n else # 'do'\n emit_do\n end\n end\n\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 136 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 325; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 137 then\n\tbegin begin p = (( @te))-1; end\n emit(:kCLASS, 'class'.freeze, @ts, @ts + 5)\n emit(:tLSHFT, '<<'.freeze, @te - 2, @te)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 138 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 139 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 140 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n @cs = 511; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 141 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(KEYWORDS)\n\n if version?(18) && tok == 'not'.freeze\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 142 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18)\n emit(:tIDENTIFIER)\n\n unless !@static_env.nil? && @static_env.declared?(tok)\n @cs = (arg_or_cmdarg);\n end\n else\n emit(:k__ENCODING__, '__ENCODING__'.freeze)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 143 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 144 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 146 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 147 then\n\tbegin begin p = (( @te))-1; end\n\n if version?(18, 19, 20)\n diagnostic :error,\n :trailing_in_number, { :character => tok(@te - 1, @te) },\n range(@te - 1, @te)\n else\n emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1)\n p = p - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 148 then\n\tbegin begin p = (( @te))-1; end\n\n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 150 then\n\tbegin begin p = (( @te))-1; end\n emit(:tCONSTANT)\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 154 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 155 then\n\tbegin begin p = (( @te))-1; end\n\n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. 
foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 157 then\n\tbegin begin p = (( @te))-1; end\n\n emit_table(PUNCTUATION);\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 158 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 226 then\n# line 1228 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1411 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tFID, tok(@ts, tm), @ts, tm)\n @cs = (arg_or_cmdarg); p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 319 then\n# line 1228 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 293 then\n# line 1228 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 396 then\n# line 1228 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2231 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if tm == @te\n # Suffix was consumed, e.g. foo!\n emit(:tFID)\n else\n # Suffix was not consumed, e.g. foo!=\n emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm)\n p = tm - 1\n end\n @cs = 466; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 320 then\n# line 1233 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 294 then\n# line 1233 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 321 then\n# line 1234 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 295 then\n# line 1234 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 325 then\n# line 1235 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 298 then\n# line 1235 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 324 then\n# line 1236 
\"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 297 then\n# line 1236 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1 \"NONE\"\n\t\tbegin\n\tcase @act\n\twhen 94 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1)\n p = p - 1; @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\twhen 113 then\n\tbegin begin p = (( @te))-1; end\n emit_table(PUNCTUATION_BEGIN)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 114 then\n\tbegin begin p = (( @te))-1; end\n emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 511; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 115 then\n\tbegin begin p = (( @te))-1; end\n emit_table(KEYWORDS_BEGIN)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\twhen 117 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\twhen 118 then\n\tbegin begin p = (( @te))-1; end\n\n emit(:tIDENTIFIER)\n\n if !@static_env.nil? && @static_env.declared?(tok)\n @cs = 438; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (arg_or_cmdarg); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\twhen 122 then\n\tbegin begin p = (( @te))-1; end\n p = @ts - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\nend \n\t\t\tend\n\twhen 322 then\n# line 1237 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 296 then\n# line 1237 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 3 \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin p = @ts - 1\n \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 323 then\n# line 1242 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 1790 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm)\n p = tm - 1\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 460 then\n# line 1247 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p - 2 \t\tend\n# line 2213 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tCONSTANT, tok(@ts, tm), @ts, tm)\n p = tm - 1; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 249 then\n# line 1253 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 1459 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze, @te - 1, @te)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 357 then\n# line 1253 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 1864 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK, '['.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 463 then\n# line 1253 
\"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n \t\tend\n# line 2300 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLBRACK2, '['.freeze)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 241 then\n# line 1260 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1440 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n if version?(18)\n emit(:tLPAREN2, '('.freeze, @te - 1, @te)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 254 then\n# line 1260 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1453 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN2, '('.freeze)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 262 then\n# line 1260 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1579 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te)\n if version?(18)\n @cs = 759; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n end\n\t\tend\n\twhen 311 then\n# line 1260 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 1869 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:tLPAREN, '('.freeze)\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 408 then\n# line 1260 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @cond.push(false); @cmdarg.push(false)\n\n @paren_nest += 1\n \t\tend\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit_table(PUNCTUATION)\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 409 then\n# line 1266 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @paren_nest -= 1\n \t\tend\n# line 2267 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n emit_table(PUNCTUATION)\n\n if @version < 24\n @cond.lexpop\n @cmdarg.lexpop\n else\n @cond.pop\n @cmdarg.pop\n end\n\n if tok == '}'.freeze || tok == ']'.freeze\n if @version >= 25\n @cs = 767;\n else\n @cs = 503;\n end\n else # )\n # fnext expr_endfn; ?\n end\n\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 70 then\n# line 1730 \"lib/parser/lexer.rl\"\n\t\tbegin\n heredoc_e = p \t\tend\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 328 then\n# line 1731 \"lib/parser/lexer.rl\"\n\t\tbegin\n new_herebody_s = p \t\tend\n# line 1732 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n tok(@ts, heredoc_e) =~ /^<<(-?)(~?)([\"'`]?)(.*)\\3$/m\n\n indent = !$1.empty? || !$2.empty?\n dedent_body = !$2.empty?\n type = $3.empty? ? 
'<<\"'.freeze : ('<<'.freeze + $3)\n delimiter = $4\n\n if @version >= 24\n if delimiter.count(\"\\n\") > 0\n if delimiter.end_with?(\"\\n\")\n diagnostic :warning, :heredoc_id_ends_with_nl, nil, range(@ts, @ts + 1)\n delimiter = delimiter.rstrip\n else\n diagnostic :fatal, :heredoc_id_has_newline, nil, range(@ts, @ts + 1)\n end\n end\n end\n\n if dedent_body && version?(18, 19, 20, 21, 22)\n emit(:tLSHFT, '<<'.freeze, @ts, @ts + 2)\n p = @ts + 1\n @cs = 535; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n @cs = (push_literal(type, delimiter, @ts, heredoc_e, indent, dedent_body));\n\n @herebody_s ||= new_herebody_s\n p = @herebody_s - 1\n end\n end\n\t\tend\n\twhen 333 then\n# line 1810 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = nil \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 360 then\n# line 1879 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1880 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin emit(:kRESCUE, 'rescue'.freeze, @ts, tm)\n p = tm - 1\n @cs = 511; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 450 then\n# line 2115 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 444 then\n# line 2116 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 447 then\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 441 then\n# line 2118 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 456 then\n# line 2119 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 419 then\n# line 2120 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 457 then\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 86 then\n# line 2337 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 2338 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin p = tm - 1; \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 8 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 423 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2179 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n\twhen 210 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1330 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act 
= 39;\t\tend\n\twhen 197 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1334 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 40;\t\tend\n\twhen 193 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1338 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 41;\t\tend\n\twhen 26 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1500 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 67;\t\tend\n\twhen 243 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1513 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 68;\t\tend\n\twhen 27 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 236 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1557 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 74;\t\tend\n\twhen 263 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1589 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 80;\t\tend\n\twhen 45 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 81;\t\tend\n\twhen 284 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1654 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 88;\t\tend\n\twhen 273 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1658 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 89;\t\tend\n\twhen 287 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1874 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 113;\t\tend\n\twhen 359 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1880 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 114;\t\tend\n\twhen 358 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1886 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 115;\t\tend\n\twhen 72 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 117;\t\tend\n\twhen 285 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 118;\t\tend\n\twhen 288 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1971 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 122;\t\tend\n\twhen 473 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2035 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 135;\t\tend\n\twhen 468 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2060 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 136;\t\tend\n\twhen 476 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2070 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 138;\t\tend\n\twhen 469 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2075 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 139;\t\tend\n\twhen 470 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2079 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 140;\t\tend\n\twhen 475 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2083 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 141;\t\tend\n\twhen 467 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2094 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 142;\t\tend\n\twhen 462 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2108 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 143;\t\tend\n\twhen 388 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 144;\t\tend\n\twhen 421 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2166 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 
147;\t\tend\n\twhen 79 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 148;\t\tend\n\twhen 391 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2209 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 150;\t\tend\n\twhen 382 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 154;\t\tend\n\twhen 394 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2231 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 155;\t\tend\n\twhen 386 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2257 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 157;\t\tend\n\twhen 393 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2263 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 158;\t\tend\n\twhen 167 then\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 927 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p+1\n begin \n current_literal = literal\n if @te == pe\n diagnostic :fatal, :string_eof, nil,\n range(current_literal.str_s, current_literal.str_s + 1)\n end\n\n if current_literal.heredoc?\n line = tok(@herebody_s, @ts).gsub(/\\r+$/, ''.freeze)\n\n if version?(18, 19, 20)\n # See ruby:c48b4209c\n line = line.gsub(/\\r.*$/, ''.freeze)\n end\n\n # Try ending the heredoc with the complete most recently\n # scanned line. @herebody_s always refers to the start of such line.\n if current_literal.nest_and_try_closing(line, @herebody_s, @ts)\n # Adjust @herebody_s to point to the next line.\n @herebody_s = @te\n\n # Continue regular lexing after the heredoc reference (<<END).\n p = current_literal.heredoc_e - 1\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n else\n # Calculate indentation level for <<~HEREDOCs.\n current_literal.infer_indent_level(line)\n\n # Ditto.\n @herebody_s = @te\n end\n else\n # Try ending the literal with a newline.\n if current_literal.nest_and_try_closing(tok, @ts, @te)\n @cs = (pop_literal); \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\n if @herebody_s\n # This is a regular literal intertwined with a heredoc. Like:\n #\n # p <<-foo+\"1\n # bar\n # foo\n # 2\"\n #\n # which, incidentally, evaluates to \"bar\\n1\\n2\".\n p = @herebody_s - 1\n @herebody_s = nil\n end\n end\n\n if current_literal.words? && !eof_codepoint?(@source_pts[p])\n current_literal.extend_space @ts, @te\n else\n # A literal newline is appended if the heredoc was _not_ closed\n # this time (see fbreak above). 
See also Literal#nest_and_try_closing\n # for rationale of calling #flush_string here.\n current_literal.extend_string tok, @ts, @te\n current_literal.flush_string\n end\n end\n\t\tend\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n\twhen 115 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 142 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? 
escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 343 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 120 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 147 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 348 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 119 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 146 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 347 then\n# line 717 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 117 then\n# line 722 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 144 then\n# line 722 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 345 then\n# line 722 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = \"\\x7f\" \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 113 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 140 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 341 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 124 then\n# line 753 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 767 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 151 then\n# line 753 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 767 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 352 then\n# line 753 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p)\n \t\tend\n# line 767 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 107 then\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 135 then\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 336 then\n# line 799 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape_s = p\n @escape = nil\n \t\tend\n# line 793 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n diagnostic :fatal, :escape_eof, nil, range(p - 1, p)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 61 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n\twhen 203 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1371 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 218 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1389 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 230 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1421 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 252 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1560 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \tbegin\n\t\t @cs = 767\n\t\t_goto_level = _again\n\t\tnext\n\tend\n end\n\t\tend\n\twhen 267 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1639 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 279 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1660 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 305 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1952 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 369 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1979 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 379 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2010 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 401 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2311 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 94 then\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 2366 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1;\t\tend\n\twhen 452 then\n# line 2119 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 416 then\n# line 2120 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n\twhen 431 then\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 424 then\n# line 2178 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 633 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 420 then\n# line 2179 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 633 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tFLOAT, Float(chars)) } \t\tend\n# line 2181 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@ts, @num_suffix_s)\n\n if version?(18, 19, 20)\n emit(:tFLOAT, Float(digits), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits)\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 258 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 491 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # Record position of a newline for precise location reporting on tNL\n # tokens.\n #\n # This action is embedded directly into c_nl, as it is idempotent and\n # there are no cases when we need to skip it.\n @newline_s = p\n \t\tend\n# line 1557 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 74;\t\tend\n\twhen 35 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n 
\t\tend\n# line 1552 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 73;\t\tend\n\twhen 46 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1602 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 81;\t\tend\n\twhen 65 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 1681 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 94;\t\tend\n\twhen 82 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? p - 2 : p) \t\tend\n# line 2064 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 137;\t\tend\n\twhen 37 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1512 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1513 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 68;\t\tend\n\twhen 362 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1879 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1925 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 117;\t\tend\n\twhen 361 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1879 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1271 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 118;\t\tend\n\twhen 453 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2119 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 144;\t\tend\n\twhen 118 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? 
&& escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 145 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 346 then\n# line 696 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n codepoint = @source_pts[p - 1]\n if (@escape = ESCAPES[codepoint]).nil?\n @escape = encode_escape(@source_buffer.slice(p - 1))\n end\n \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 116 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 143 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 872 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n current_literal = literal\n # Get the first character after the backslash.\n escaped_char = @source_buffer.slice(@escape_s).chr\n\n if current_literal.munge_escape? escaped_char\n # If this particular literal uses this character as an opening\n # or closing delimiter, it is an escape sequence for that\n # particular character. Write it without the backslash.\n\n if current_literal.regexp? && REGEXP_META_CHARACTERS.match(escaped_char)\n # Regular expressions should include escaped delimiters in their\n # escaped form, except when the escaped character is\n # a closing delimiter but not a regexp metacharacter.\n #\n # The backslash itself cannot be used as a closing delimiter\n # at the same time as an escape symbol, but it is always munged,\n # so this branch also executes for the non-closing-delimiter case\n # for the backslash.\n current_literal.extend_string(tok, @ts, @te)\n else\n current_literal.extend_string(escaped_char, @ts, @te)\n end\n else\n # It does not. So this is an actual escape sequence, yay!\n if current_literal.squiggly_heredoc? && escaped_char == \"\\n\".freeze\n # Squiggly heredocs like\n # <<~-HERE\n # 1\\\n # 2\n # HERE\n # treat '\\' as a line continuation, but still dedent the body, so the heredoc above becomes \"12\\n\".\n # This information is emitted as is, without escaping,\n # later this escape sequence (\\\\\\n) gets handled manually in the Lexer::Dedenter\n current_literal.extend_string(tok, @ts, @te)\n elsif current_literal.supports_line_continuation_via_slash? && escaped_char == \"\\n\".freeze\n # Heredocs, regexp and a few other types of literals support line\n # continuation via \\\\\\n sequence. The code like\n # \"a\\\n # b\"\n # must be parsed as \"ab\"\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n elsif current_literal.regexp?\n # Regular expressions should include escape sequences in their\n # escaped form. 
On the other hand, escaped newlines are removed (in cases like \"\\\\C-\\\\\\n\\\\M-x\")\n current_literal.extend_string(tok.gsub(\"\\\\\\n\".freeze, ''.freeze), @ts, @te)\n else\n current_literal.extend_string(@escape || tok, @ts, @te)\n end\n end\n end\n\t\tend\n\twhen 344 then\n# line 723 \"lib/parser/lexer.rl\"\n\t\tbegin\n @escape = @source_buffer.slice(p - 1).chr \t\tend\n# line 707 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord & 0x9f)\n \t\tend\n# line 711 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n @escape = encode_escape(@escape[0].ord | 0x80)\n \t\tend\n# line 1812 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n value = @escape || tok(@ts + 1)\n\n if version?(18)\n emit(:tINTEGER, value.getbyte(0))\n else\n emit(:tCHARACTER, value)\n end\n\n @cs = 767; \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 448 then\n# line 2115 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 16; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 442 then\n# line 2116 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 445 then\n# line 2117 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 439 then\n# line 2118 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 2; @num_digits_s = p \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 451 then\n# line 2119 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? 
&& @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 415 then\n# line 2120 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2122 \"lib/parser/lexer.rl\"\n\t\tbegin\n @te = p\np = p - 1; begin \n digits = tok(@num_digits_s, @num_suffix_s)\n\n if digits.end_with? '_'.freeze\n diagnostic :error, :trailing_in_number, { :character => '_'.freeze },\n range(@te - 1, @te)\n elsif digits.empty? && @num_base == 8 && version?(18)\n # 1.8 did not raise an error on 0o.\n digits = '0'.freeze\n elsif digits.empty?\n diagnostic :error, :empty_numeric\n elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/))\n invalid_s = @num_digits_s + invalid_idx\n diagnostic :error, :invalid_octal, nil,\n range(invalid_s, invalid_s + 1)\n end\n\n if version?(18, 19, 20)\n emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s)\n p = @num_suffix_s - 1\n else\n @num_xfrm.call(digits.to_i(@num_base))\n end\n \tbegin\n\t\tp += 1\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\n end\n\t\tend\n\twhen 31 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 839 \"lib/parser/lexer.rl\"\n\t\tbegin\n\n # After every heredoc was parsed, @herebody_s contains the\n # position of next token after all heredocs.\n if @herebody_s\n p = @herebody_s\n @herebody_s = nil\n end\n \t\tend\n# line 1512 \"lib/parser/lexer.rl\"\n\t\tbegin\n tm = p \t\tend\n# line 1513 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 68;\t\tend\n\twhen 62 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 1183 \"lib/parser/lexer.rl\"\n\t\tbegin\n @sharp_s = p - 1 \t\tend\n# line 1186 \"lib/parser/lexer.rl\"\n\t\tbegin\n emit_comment(@sharp_s, p == pe ? 
p - 2 : p) \t\tend\n# line 1681 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 94;\t\tend\n\twhen 458 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2154 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 146;\t\tend\n\twhen 454 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2119 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 10; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2154 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 146;\t\tend\n\twhen 418 then\n# line 1 \"NONE\"\n\t\tbegin\n @te = p+1\n\t\tend\n# line 2120 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_base = 8; @num_digits_s = @ts \t\tend\n# line 2121 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_suffix_s = p \t\tend\n# line 624 \"lib/parser/lexer.rl\"\n\t\tbegin\n @num_xfrm = lambda { |chars| emit(:tINTEGER, chars) } \t\tend\n# line 2154 \"lib/parser/lexer.rl\"\n\t\tbegin\n @act = 146;\t\tend\n# line 22832 \"lib/parser/lexer.rb\"\n\tend\n\tend\n\tend\n\tif _goto_level <= _again\n\tcase _lex_to_state_actions[ @cs] \n\twhen 88 then\n# line 1 \"NONE\"\n\t\tbegin\n @ts = nil;\t\tend\n# line 22842 \"lib/parser/lexer.rb\"\n\tend\n\n\tif @cs == 0\n\t\t_goto_level = _out\n\t\tnext\n\tend\n\tp += 1\n\tif p != pe\n\t\t_goto_level = _resume\n\t\tnext\n\tend\n\tend\n\tif _goto_level <= _test_eof\n\tif p == eof\n\tif _lex_eof_trans[ @cs] > 0\n\t\t_trans = _lex_eof_trans[ @cs] - 1;\n\t\t_goto_level = _eof_trans\n\t\tnext;\n\tend\n\tend\n\n\tend\n\tif _goto_level <= _out\n\t\tbreak\n\tend\nend\n\tend\n\n# line 283 \"lib/parser/lexer.rl\"\n # %\n\n @p = p\n\n if @token_queue.any?\n @token_queue.shift\n elsif @cs == klass.lex_error\n [ false, [ '$error'.freeze, range(p - 1, p) ] ]\n else\n eof = @source_pts.size\n [ false, [ '$eof'.freeze, range(eof, eof) ] ]\n end\n end",
"def enqueue_next_tokens()\n case current_state_name(:token_recognition)\n when :ready\n if significant_indentation && (current_state_name(:line_positioning) == :at_line_start)\n scan_indentation()\n end\n loop do\n scanner.skip(noise_pattern)\n if scanner.check(eol_pattern)\n eol_checked()\n next\n end\n if eos?\n eos_detected()\n break\n end\n\n # Here starts the core tokenizing work...\n # Retrieve the (state-dependent) main/start Lexer rule...\n theRule = find_rule(main_rule_name())\n\n begin\n # Now apply the rule to the input managed by this Lexer\n theRule.apply_to(self)\t# One or more tokens are placed in the queue\n\n unless current_state_name(:token_recognition) == :recognized\n # Error detected...\n raise InternalLexerError.new(\"Internal error: Lexer in unexpected state '#{current_state_name(:token_recognition)}'\", nil)\n end\n rescue LexerError => exc\n # Enqueue the \"exception\" as an error token\n error_token = [:error, exc]\n queue.enqueue error_token\n end\n end # loop\n else # Other state ...\n raise LexerSetupError, \"Unimplemented handler\"\n end\n end",
"def lex(input)\n line = 1\n offset = 0\n ending = input.length\n\n until offset == ending do\n next_token(input, offset, line).tap do |token|\n raise UnconsumedInputError,\n \"Unmatched input #{input[offset..-1].inspect} on line #{line}\" if token.nil?\n\n token[:offset] = offset\n line, token[:line] = token[:line], line\n offset += token[:value].length\n yield token unless token[:discarded]\n end\n end\n\n yield ({ :name => :$end, :line => line, :value => nil, :offset => offset })\n end",
"def token!\n # at line 1:8: ( RET | SCOPE | FRAGMENT | TREE_BEGIN | ROOT | BANG | RANGE | REWRITE | AT | LABEL_ASSIGN | LIST_LABEL_ASSIGN | T__67 | T__68 | T__69 | T__70 | T__71 | T__72 | T__73 | T__74 | T__75 | T__76 | T__77 | T__78 | T__79 | T__80 | T__81 | T__82 | T__83 | T__84 | T__85 | T__86 | T__87 | T__88 | T__89 | T__90 | T__91 | T__92 | T__93 | SL_COMMENT | ML_COMMENT | CHAR_LITERAL | STRING_LITERAL | DOUBLE_QUOTE_STRING_LITERAL | DOUBLE_ANGLE_STRING_LITERAL | INT | ARG_ACTION | ACTION | TOKEN_REF | RULE_REF | OPTIONS | TOKENS | WS )\n alt_21 = 52\n alt_21 = @dfa21.predict(@input)\n case alt_21\n when 1\n # at line 1:10: RET\n ret!\n\n when 2\n # at line 1:14: SCOPE\n scope!\n\n when 3\n # at line 1:20: FRAGMENT\n fragment!\n\n when 4\n # at line 1:29: TREE_BEGIN\n tree_begin!\n\n when 5\n # at line 1:40: ROOT\n root!\n\n when 6\n # at line 1:45: BANG\n bang!\n\n when 7\n # at line 1:50: RANGE\n range!\n\n when 8\n # at line 1:56: REWRITE\n rewrite!\n\n when 9\n # at line 1:64: AT\n at!\n\n when 10\n # at line 1:67: LABEL_ASSIGN\n label_assign!\n\n when 11\n # at line 1:80: LIST_LABEL_ASSIGN\n list_label_assign!\n\n when 12\n # at line 1:98: T__67\n t__67!\n\n when 13\n # at line 1:104: T__68\n t__68!\n\n when 14\n # at line 1:110: T__69\n t__69!\n\n when 15\n # at line 1:116: T__70\n t__70!\n\n when 16\n # at line 1:122: T__71\n t__71!\n\n when 17\n # at line 1:128: T__72\n t__72!\n\n when 18\n # at line 1:134: T__73\n t__73!\n\n when 19\n # at line 1:140: T__74\n t__74!\n\n when 20\n # at line 1:146: T__75\n t__75!\n\n when 21\n # at line 1:152: T__76\n t__76!\n\n when 22\n # at line 1:158: T__77\n t__77!\n\n when 23\n # at line 1:164: T__78\n t__78!\n\n when 24\n # at line 1:170: T__79\n t__79!\n\n when 25\n # at line 1:176: T__80\n t__80!\n\n when 26\n # at line 1:182: T__81\n t__81!\n\n when 27\n # at line 1:188: T__82\n t__82!\n\n when 28\n # at line 1:194: T__83\n t__83!\n\n when 29\n # at line 1:200: T__84\n t__84!\n\n when 30\n # at line 1:206: T__85\n t__85!\n\n when 31\n # at line 1:212: T__86\n t__86!\n\n when 32\n # at line 1:218: T__87\n t__87!\n\n when 33\n # at line 1:224: T__88\n t__88!\n\n when 34\n # at line 1:230: T__89\n t__89!\n\n when 35\n # at line 1:236: T__90\n t__90!\n\n when 36\n # at line 1:242: T__91\n t__91!\n\n when 37\n # at line 1:248: T__92\n t__92!\n\n when 38\n # at line 1:254: T__93\n t__93!\n\n when 39\n # at line 1:260: SL_COMMENT\n sl_comment!\n\n when 40\n # at line 1:271: ML_COMMENT\n ml_comment!\n\n when 41\n # at line 1:282: CHAR_LITERAL\n char_literal!\n\n when 42\n # at line 1:295: STRING_LITERAL\n string_literal!\n\n when 43\n # at line 1:310: DOUBLE_QUOTE_STRING_LITERAL\n double_quote_string_literal!\n\n when 44\n # at line 1:338: DOUBLE_ANGLE_STRING_LITERAL\n double_angle_string_literal!\n\n when 45\n # at line 1:366: INT\n int!\n\n when 46\n # at line 1:370: ARG_ACTION\n arg_action!\n\n when 47\n # at line 1:381: ACTION\n action!\n\n when 48\n # at line 1:388: TOKEN_REF\n token_ref!\n\n when 49\n # at line 1:398: RULE_REF\n rule_ref!\n\n when 50\n # at line 1:407: OPTIONS\n options!\n\n when 51\n # at line 1:415: TOKENS\n tokens!\n\n when 52\n # at line 1:422: WS\n ws!\n\n end\n end",
"def input!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 21 )\n\n\n\n type = INPUT\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 42:8: 'traer'\n match( \"traer\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 21 )\n\n\n end",
"def next_token\n return [false, false] if @src.eos?\n# p @src.rest if @yydebug\n if ret = @src.scan(EM_OPEN_RE)\n @pre << ret\n [:EM_OPEN, ret]\n elsif ret = @src.scan(EM_CLOSE_RE)\n @pre << ret\n [:EM_CLOSE, ret]\n elsif ret = @src.scan(CODE_OPEN_RE)\n @pre << ret\n [:CODE_OPEN, ret]\n elsif ret = @src.scan(CODE_CLOSE_RE)\n @pre << ret\n [:CODE_CLOSE, ret]\n elsif ret = @src.scan(VAR_OPEN_RE)\n @pre << ret\n [:VAR_OPEN, ret]\n elsif ret = @src.scan(VAR_CLOSE_RE)\n @pre << ret\n [:VAR_CLOSE, ret]\n elsif ret = @src.scan(KBD_OPEN_RE)\n @pre << ret\n [:KBD_OPEN, ret]\n elsif ret = @src.scan(KBD_CLOSE_RE)\n @pre << ret\n [:KBD_CLOSE, ret]\n elsif ret = @src.scan(INDEX_OPEN_RE)\n @pre << ret\n [:INDEX_OPEN, ret]\n elsif ret = @src.scan(INDEX_CLOSE_RE)\n @pre << ret\n [:INDEX_CLOSE, ret]\n elsif ret = @src.scan(REF_OPEN_RE)\n @pre << ret\n [:REF_OPEN, ret]\n elsif ret = @src.scan(REF_CLOSE_RE)\n @pre << ret\n [:REF_CLOSE, ret]\n elsif ret = @src.scan(FOOTNOTE_OPEN_RE)\n @pre << ret\n [:FOOTNOTE_OPEN, ret]\n elsif ret = @src.scan(FOOTNOTE_CLOSE_RE)\n @pre << ret\n [:FOOTNOTE_CLOSE, ret]\n elsif ret = @src.scan(VERB_OPEN_RE)\n @pre << ret\n [:VERB_OPEN, ret]\n elsif ret = @src.scan(VERB_CLOSE_RE)\n @pre << ret\n [:VERB_CLOSE, ret]\n elsif ret = @src.scan(BAR_RE)\n @pre << ret\n [:BAR, ret]\n elsif ret = @src.scan(QUOTE_RE)\n @pre << ret\n [:QUOTE, ret]\n elsif ret = @src.scan(SLASH_RE)\n @pre << ret\n [:SLASH, ret]\n elsif ret = @src.scan(BACK_SLASH_RE)\n @pre << ret\n [:BACK_SLASH, ret]\n elsif ret = @src.scan(URL_RE)\n @pre << ret\n [:URL, ret]\n elsif ret = @src.scan(OTHER_RE)\n @pre << ret\n [:OTHER, ret]\n else\n ret = @src.rest\n @pre << ret\n @src.terminate\n [:OTHER, ret]\n end\nend",
"def next_token\n #dputs \"@line: \" + @line\n if @state == :normal\n while true\n temp = _next_token\n unless temp == \"#white_space\" || temp == \"#comment\"\n break\n end\n end\n #dputs \"token: \" + temp\n @current_token = temp\n return temp\n else\n return :Terminate\n end\n \n end",
"def token!\n # at line 1:8: ( T__80 | T__81 | T__82 | EOL | LPAR | RPAR | LLAIZQ | LLADER | COMA | PUNTO | CORDER | CORIZQ | DELIM | ASIGNACION | DOUBLEDOT | COMILLA | OP_REL | OP_ARI | OP_LOG | K_ACCIONARCHIVO | K_ACCIONARREGLO | K_PACKAGE | K_CLASS | K_END | K_DEF | K_VAR | K_REQUIRE | K_IMPLEMENTS | K_EXTENDS | K_IMPRIMIR | K_CONVERSION | K_ASIGNACION | K_RETORNO | K_ACCIONSYS | K_INTERFACE | K_IF | K_TIMES | K_DO | K_EACH | K_ELSE | K_INVOKE | K_NEW | TIPO | K_REFERENCIA | K_INSPECCIONAR | K_MATEMATICA | K_NUM | K_RESIZE | K_ORDENAR | ModoOrd | K_BUSQUEDA | K_TIPOBUSQUEDA | K_WHERE | K_SPLIT | K_BEGIN | K_DIR | K_UNION | K_VISIBILIDAD | K_MODIFICADOR | K_ARRAY | K_PROPIEDAD | K_GET | K_SET | K_VALUE | K_INITIALIZE | K_FUNC | K_VOID | HexLiteral | DecimalLiteral | OctalLiteral | WS | LINE_COMMENT | Identificador )\n alt_28 = 73\n alt_28 = @dfa28.predict( @input )\n case alt_28\n when 1\n # at line 1:10: T__80\n t__80!\n\n\n when 2\n # at line 1:16: T__81\n t__81!\n\n\n when 3\n # at line 1:22: T__82\n t__82!\n\n\n when 4\n # at line 1:28: EOL\n eol!\n\n\n when 5\n # at line 1:32: LPAR\n lpar!\n\n\n when 6\n # at line 1:37: RPAR\n rpar!\n\n\n when 7\n # at line 1:42: LLAIZQ\n llaizq!\n\n\n when 8\n # at line 1:49: LLADER\n llader!\n\n\n when 9\n # at line 1:56: COMA\n coma!\n\n\n when 10\n # at line 1:61: PUNTO\n punto!\n\n\n when 11\n # at line 1:67: CORDER\n corder!\n\n\n when 12\n # at line 1:74: CORIZQ\n corizq!\n\n\n when 13\n # at line 1:81: DELIM\n delim!\n\n\n when 14\n # at line 1:87: ASIGNACION\n asignacion!\n\n\n when 15\n # at line 1:98: DOUBLEDOT\n doubledot!\n\n\n when 16\n # at line 1:108: COMILLA\n comilla!\n\n\n when 17\n # at line 1:116: OP_REL\n op_rel!\n\n\n when 18\n # at line 1:123: OP_ARI\n op_ari!\n\n\n when 19\n # at line 1:130: OP_LOG\n op_log!\n\n\n when 20\n # at line 1:137: K_ACCIONARCHIVO\n k_accionarchivo!\n\n\n when 21\n # at line 1:153: K_ACCIONARREGLO\n k_accionarreglo!\n\n\n when 22\n # at line 1:169: K_PACKAGE\n k_package!\n\n\n when 23\n # at line 1:179: K_CLASS\n k_class!\n\n\n when 24\n # at line 1:187: K_END\n k_end!\n\n\n when 25\n # at line 1:193: K_DEF\n k_def!\n\n\n when 26\n # at line 1:199: K_VAR\n k_var!\n\n\n when 27\n # at line 1:205: K_REQUIRE\n k_require!\n\n\n when 28\n # at line 1:215: K_IMPLEMENTS\n k_implements!\n\n\n when 29\n # at line 1:228: K_EXTENDS\n k_extends!\n\n\n when 30\n # at line 1:238: K_IMPRIMIR\n k_imprimir!\n\n\n when 31\n # at line 1:249: K_CONVERSION\n k_conversion!\n\n\n when 32\n # at line 1:262: K_ASIGNACION\n k_asignacion!\n\n\n when 33\n # at line 1:275: K_RETORNO\n k_retorno!\n\n\n when 34\n # at line 1:285: K_ACCIONSYS\n k_accionsys!\n\n\n when 35\n # at line 1:297: K_INTERFACE\n k_interface!\n\n\n when 36\n # at line 1:309: K_IF\n k_if!\n\n\n when 37\n # at line 1:314: K_TIMES\n k_times!\n\n\n when 38\n # at line 1:322: K_DO\n k_do!\n\n\n when 39\n # at line 1:327: K_EACH\n k_each!\n\n\n when 40\n # at line 1:334: K_ELSE\n k_else!\n\n\n when 41\n # at line 1:341: K_INVOKE\n k_invoke!\n\n\n when 42\n # at line 1:350: K_NEW\n k_new!\n\n\n when 43\n # at line 1:356: TIPO\n tipo!\n\n\n when 44\n # at line 1:361: K_REFERENCIA\n k_referencia!\n\n\n when 45\n # at line 1:374: K_INSPECCIONAR\n k_inspeccionar!\n\n\n when 46\n # at line 1:389: K_MATEMATICA\n k_matematica!\n\n\n when 47\n # at line 1:402: K_NUM\n k_num!\n\n\n when 48\n # at line 1:408: K_RESIZE\n k_resize!\n\n\n when 49\n # at line 1:417: K_ORDENAR\n k_ordenar!\n\n\n when 50\n # at line 1:427: ModoOrd\n modo_ord!\n\n\n when 51\n # at line 1:435: 
K_BUSQUEDA\n k_busqueda!\n\n\n when 52\n # at line 1:446: K_TIPOBUSQUEDA\n k_tipobusqueda!\n\n\n when 53\n # at line 1:461: K_WHERE\n k_where!\n\n\n when 54\n # at line 1:469: K_SPLIT\n k_split!\n\n\n when 55\n # at line 1:477: K_BEGIN\n k_begin!\n\n\n when 56\n # at line 1:485: K_DIR\n k_dir!\n\n\n when 57\n # at line 1:491: K_UNION\n k_union!\n\n\n when 58\n # at line 1:499: K_VISIBILIDAD\n k_visibilidad!\n\n\n when 59\n # at line 1:513: K_MODIFICADOR\n k_modificador!\n\n\n when 60\n # at line 1:527: K_ARRAY\n k_array!\n\n\n when 61\n # at line 1:535: K_PROPIEDAD\n k_propiedad!\n\n\n when 62\n # at line 1:547: K_GET\n k_get!\n\n\n when 63\n # at line 1:553: K_SET\n k_set!\n\n\n when 64\n # at line 1:559: K_VALUE\n k_value!\n\n\n when 65\n # at line 1:567: K_INITIALIZE\n k_initialize!\n\n\n when 66\n # at line 1:580: K_FUNC\n k_func!\n\n\n when 67\n # at line 1:587: K_VOID\n k_void!\n\n\n when 68\n # at line 1:594: HexLiteral\n hex_literal!\n\n\n when 69\n # at line 1:605: DecimalLiteral\n decimal_literal!\n\n\n when 70\n # at line 1:620: OctalLiteral\n octal_literal!\n\n\n when 71\n # at line 1:633: WS\n ws!\n\n\n when 72\n # at line 1:636: LINE_COMMENT\n line_comment!\n\n\n when 73\n # at line 1:649: Identificador\n identificador!\n\n\n end\n end",
"def next_token\n\n token = nil\n\n until ss.eos? or token do\n token =\n case state\n when nil then\n case\n when text = ss.scan(/select/i) then\n action { [:kw_select, text] }\n when text = ss.scan(/from/i) then\n action { [:kw_from, text] }\n when text = ss.scan(/where/i) then\n action { [:kw_where, text] }\n when text = ss.scan(/null/i) then\n action { [:kw_null, text] }\n when text = ss.scan(/not/i) then\n action { [:kw_not, text] }\n when text = ss.scan(/limit/i) then\n action { [:kw_limit, text] }\n when text = ss.scan(/offset/i) then\n action { [:kw_offset, text] }\n when text = ss.scan(/insert/i) then\n action { [:kw_insert, text] }\n when text = ss.scan(/into/i) then\n action { [:kw_into, text] }\n when text = ss.scan(/values/i) then\n action { [:kw_values, text] }\n when text = ss.scan(/create/i) then\n action { [:kw_create, text] }\n when text = ss.scan(/table/i) then\n action { [:kw_table, text] }\n when text = ss.scan(/constraint/i) then\n action { [:kw_constraint, text] }\n when text = ss.scan(/key/i) then\n action { [:kw_key, text] }\n when text = ss.scan(/primary/i) then\n action { [:kw_primary, text] }\n when text = ss.scan(/\\*/) then\n action { [:star, text] }\n when text = ss.scan(/\\(/) then\n action { [:lparen, text] }\n when text = ss.scan(/\\)/) then\n action { [:rparen, text] }\n when ss.skip(/\\s+/) then\n # do nothing\n when text = ss.scan(/or/i) then\n action { [:op_or, text] }\n when text = ss.scan(/and/i) then\n action { [:op_and, text] }\n when text = ss.scan(/[a-zA-Z]\\w*/) then\n action { [:identifier, text] }\n when text = ss.scan(/=/) then\n action { [:op_equal, text] }\n when text = ss.scan(/<=/) then\n action { [:op_lte, text] }\n when text = ss.scan(/>=/) then\n action { [:op_gte, text] }\n when text = ss.scan(/</) then\n action { [:op_lt, text] }\n when text = ss.scan(/>/) then\n action { [:op_gt, text] }\n when text = ss.scan(/;/) then\n action { [:semicolon, text] }\n when text = ss.scan(/,/) then\n action { [:comma, text] }\n when text = ss.scan(/\\./) then\n action { [:period, text] }\n when text = ss.scan(/\\d+/) then\n action { [:integer, text.to_i] }\n when ss.skip(/'/) then\n [:state, :QUOTE]\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n when :QUOTE then\n case\n when ss.skip(/'/) then\n [:state, nil]\n when text = ss.scan(/[^']+/) then\n action { [:string, text] }\n else\n text = ss.string[ss.pos .. -1]\n raise ScanError, \"can not match (#{state.inspect}) at #{location}: '#{text}'\"\n end\n else\n raise ScanError, \"undefined state at #{location}: '#{state}'\"\n end # token = case state\n\n next unless token # allow functions to trigger redo w/ nil\n end # while\n\n raise LexerError, \"bad lexical result at #{location}: #{token.inspect}\" unless\n token.nil? || (Array === token && token.size >= 2)\n\n # auto-switch state\n self.state = token.last if token && token.first == :state\n\n token\n end",
"def lex\n return enum_for(__method__) unless block_given?\n @src.each_line do |line|\n yield match(line)\n @lineno += 1\n end\n eof = Line.new(\"\", @lineno, :eof, \"\", \"\", \"\")\n loop { yield eof }\n end",
"def next_token\n @state = 1\n value = \"\"\n recovery_data = [0, 0]\n\n while !@stream.eof?\n char = @stream.read(1)\n next_state = get_next_state(char)\n\n # Move to the next state.\n if next_state\n if recognizable?\n recovery_data = [@state, 0]\n end\n\n value << char\n recovery_data[1] += 1\n @state = next_state\n else\n # Recognise the final token.\n if recognizable?\n @stream.seek(@stream.pos - 1)\n break\n else\n # Recoverable error.\n if recovery_data[0] > 0\n value = recover_from_error!(recovery_data, value)\n break\n # Fatal lexical error.\n else\n raise Bolverk::ASM::LexicalError, \"Disallowed token: #{char} on line #{@stream.line_number}\"\n end\n end\n end\n end\n\n build_token(value)\n end",
"def next_token\n raise NotImplementedError\n end",
"def token!\n # at line 1:8: ( T__30 | T__31 | T__32 | T__33 | T__34 | T__35 | T__36 | T__37 | T__38 | T__39 | T__40 | T__41 | T__42 | T__43 | T__44 | T__45 | T__46 | T__47 | T__48 | T__49 | T__50 | T__51 | T__52 | T__53 | T__54 | INTEGER | FLOAT | BOOLEAN | STRING | CHAR | INIT | OPEN | CLOSE | TYPE_INT | TYPE_FLOAT | TYPE_STRING | TYPE_BOOL | TYPE_VOID | ENTITY | COMPONENT | SYSTEM | ENUM | IF | ELSE | RETURN | WHILE | IDENT | WS | COMMENT | MULTILINE_COMMENT | NL )\n alt_9 = 51\n alt_9 = @dfa9.predict( @input )\n case alt_9\n when 1\n # at line 1:10: T__30\n t__30!\n\n\n when 2\n # at line 1:16: T__31\n t__31!\n\n\n when 3\n # at line 1:22: T__32\n t__32!\n\n\n when 4\n # at line 1:28: T__33\n t__33!\n\n\n when 5\n # at line 1:34: T__34\n t__34!\n\n\n when 6\n # at line 1:40: T__35\n t__35!\n\n\n when 7\n # at line 1:46: T__36\n t__36!\n\n\n when 8\n # at line 1:52: T__37\n t__37!\n\n\n when 9\n # at line 1:58: T__38\n t__38!\n\n\n when 10\n # at line 1:64: T__39\n t__39!\n\n\n when 11\n # at line 1:70: T__40\n t__40!\n\n\n when 12\n # at line 1:76: T__41\n t__41!\n\n\n when 13\n # at line 1:82: T__42\n t__42!\n\n\n when 14\n # at line 1:88: T__43\n t__43!\n\n\n when 15\n # at line 1:94: T__44\n t__44!\n\n\n when 16\n # at line 1:100: T__45\n t__45!\n\n\n when 17\n # at line 1:106: T__46\n t__46!\n\n\n when 18\n # at line 1:112: T__47\n t__47!\n\n\n when 19\n # at line 1:118: T__48\n t__48!\n\n\n when 20\n # at line 1:124: T__49\n t__49!\n\n\n when 21\n # at line 1:130: T__50\n t__50!\n\n\n when 22\n # at line 1:136: T__51\n t__51!\n\n\n when 23\n # at line 1:142: T__52\n t__52!\n\n\n when 24\n # at line 1:148: T__53\n t__53!\n\n\n when 25\n # at line 1:154: T__54\n t__54!\n\n\n when 26\n # at line 1:160: INTEGER\n integer!\n\n\n when 27\n # at line 1:168: FLOAT\n float!\n\n\n when 28\n # at line 1:174: BOOLEAN\n boolean!\n\n\n when 29\n # at line 1:182: STRING\n string!\n\n\n when 30\n # at line 1:189: CHAR\n char!\n\n\n when 31\n # at line 1:194: INIT\n init!\n\n\n when 32\n # at line 1:199: OPEN\n open!\n\n\n when 33\n # at line 1:204: CLOSE\n close!\n\n\n when 34\n # at line 1:210: TYPE_INT\n type_int!\n\n\n when 35\n # at line 1:219: TYPE_FLOAT\n type_float!\n\n\n when 36\n # at line 1:230: TYPE_STRING\n type_string!\n\n\n when 37\n # at line 1:242: TYPE_BOOL\n type_bool!\n\n\n when 38\n # at line 1:252: TYPE_VOID\n type_void!\n\n\n when 39\n # at line 1:262: ENTITY\n entity!\n\n\n when 40\n # at line 1:269: COMPONENT\n component!\n\n\n when 41\n # at line 1:279: SYSTEM\n system!\n\n\n when 42\n # at line 1:286: ENUM\n enum!\n\n\n when 43\n # at line 1:291: IF\n if!\n\n\n when 44\n # at line 1:294: ELSE\n else!\n\n\n when 45\n # at line 1:299: RETURN\n return!\n\n\n when 46\n # at line 1:306: WHILE\n while!\n\n\n when 47\n # at line 1:312: IDENT\n ident!\n\n\n when 48\n # at line 1:318: WS\n ws!\n\n\n when 49\n # at line 1:321: COMMENT\n comment!\n\n\n when 50\n # at line 1:329: MULTILINE_COMMENT\n multiline_comment!\n\n\n when 51\n # at line 1:347: NL\n nl!\n\n\n end\n end",
"def lex(&block)\n loop do\n token = self.next\n yield token\n break if token.eof?\n end\n end",
"def next_item\n lexeme, token = @lexer.next, nil\n if lexeme[0].nil?\n token = { type: :eof }\n elsif lexeme[0].lol_string?\n token = { type: :string, data: lexeme[0][1..-2] }\n elsif lexeme[0].lol_integer?\n token = { type: :integer, data: lexeme[0].to_i }\n elsif lexeme[0].lol_float?\n token = { type: :float, data: lexeme[0].to_f }\n elsif lexeme[0].lol_boolean?\n token = { type: :boolean, data: (lexeme[0] == 'WIN') }\n elsif lexeme[0] == '!'\n token = { type: :exclamation }\n elsif lexeme[0] == \"\\n\"\n token = { type: :newline }\n else\n # Try to match keyword\n token_type = match_longest(lexeme[0], @token_table)\n unless token_type.nil?\n token = { type: token_type }\n # Consume all peeked lexemes\n token_type.to_s.count('_').times { @lexer.next }\n else\n # Try to match identifier\n if lexeme[0].lol_identifier?\n token = { type: :identifier, data: lexeme[0] }\n end\n end\n end\n raise UnknownTokenError.new(lexeme) if token.nil?\n token.merge(line: lexeme[1], pos: lexeme[2])\n end",
"def next_token(options = { should_advance?: true })\n if @current_scope.type == Scope::TYPE_MAIN && @current_scope.current_token.nil?\n token = @lexer.next_token\n @current_scope.tokens << token unless token.nil?\n end\n\n token = @current_scope.current_token\n\n advance token if options[:should_advance?] && token\n\n token\n end",
"def get_token\n return nil if @token_index >= @arguments.size\n\n begin\n case chr(@arguments[@token_index])\n when \"[\"\n return \"statement\", gen_substatement\n\n when \"]\"\n return \"]\"\n\n when \"(\"\n return \"(\", \"(\"\n\n when \")\"\n return \")\", \")\"\n\n when \"n\"\n if (chr(@arguments[@token_index + 1]) == \"o\") && (chr(@arguments[@token_index + 2]) == \"t\") && ((chr(@arguments[@token_index + 3]) == \" \") || (chr(@arguments[@token_index + 3]) == \"(\"))\n @token_index += 2\n return \"not\", \"not\"\n else\n gen_statement\n end\n\n when \"!\"\n return \"not\", \"not\"\n\n when \"a\"\n if (chr(@arguments[@token_index + 1]) == \"n\") && (chr(@arguments[@token_index + 2]) == \"d\") && ((chr(@arguments[@token_index + 3]) == \" \") || (chr(@arguments[@token_index + 3]) == \"(\"))\n @token_index += 2\n return \"and\", \"and\"\n else\n gen_statement\n end\n\n when \"&\"\n if chr(@arguments[@token_index + 1]) == \"&\"\n @token_index += 1\n return \"and\", \"and\"\n else\n gen_statement\n end\n\n when \"o\"\n if (chr(@arguments[@token_index + 1]) == \"r\") && ((chr(@arguments[@token_index + 2]) == \" \") || (chr(@arguments[@token_index + 2]) == \"(\"))\n @token_index += 1\n return \"or\", \"or\"\n else\n gen_statement\n end\n\n when \"|\"\n if chr(@arguments[@token_index + 1]) == \"|\"\n @token_index += 1\n return \"or\", \"or\"\n else\n gen_statement\n end\n\n when \"+\"\n value = \"\"\n i = @token_index + 1\n\n begin\n value += chr(@arguments[i])\n i += 1\n end until (i >= @arguments.size) || (chr(@arguments[i]) =~ /\\s|\\)/)\n\n @token_index = i - 1\n return \"+\", value\n\n when \"-\"\n value = \"\"\n i = @token_index + 1\n\n begin\n value += chr(@arguments[i])\n i += 1\n end until (i >= @arguments.size) || (chr(@arguments[i]) =~ /\\s|\\)/)\n\n @token_index = i - 1\n return \"-\", value\n\n when \" \"\n return \" \", \" \"\n\n else\n gen_statement\n end\n end\n rescue NoMethodError\n raise \"Error. Expression cannot be parsed.\"\n end",
"def grammar_def\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 1)\n return_value = GrammarDefReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n g = nil\n __DOC_COMMENT1__ = nil\n string_literal2 = nil\n string_literal3 = nil\n string_literal4 = nil\n char_literal6 = nil\n __EOF12__ = nil\n id5 = nil\n options_spec7 = nil\n tokens_spec8 = nil\n attr_scope9 = nil\n action10 = nil\n rule11 = nil\n\n tree_for_g = nil\n tree_for_DOC_COMMENT1 = nil\n tree_for_string_literal2 = nil\n tree_for_string_literal3 = nil\n tree_for_string_literal4 = nil\n tree_for_char_literal6 = nil\n tree_for_EOF12 = nil\n stream_T__68 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__68\")\n stream_DOC_COMMENT = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token DOC_COMMENT\")\n stream_T__69 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__69\")\n stream_T__67 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__67\")\n stream_T__71 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__71\")\n stream_T__70 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__70\")\n stream_EOF = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token EOF\")\n stream_id = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule id\")\n stream_tokens_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule tokens_spec\")\n stream_rule = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule rule\")\n stream_options_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule options_spec\")\n stream_action = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule action\")\n stream_attr_scope = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule attr_scope\")\n begin\n # at line 95:9: ( DOC_COMMENT )? ( 'lexer' | 'parser' | 'tree' | ) g= 'grammar' id ';' ( options_spec )? ( tokens_spec )? 
( attr_scope )* ( action )* ( rule )+ EOF\n # at line 95:9: ( DOC_COMMENT )?\n alt_1 = 2\n look_1_0 = @input.peek(1)\n\n if (look_1_0 == DOC_COMMENT) \n alt_1 = 1\n end\n case alt_1\n when 1\n # at line 95:9: DOC_COMMENT\n __DOC_COMMENT1__ = match(DOC_COMMENT, TOKENS_FOLLOWING_DOC_COMMENT_IN_grammar_def_295) \n if @state.backtracking == 0\n stream_DOC_COMMENT.add(__DOC_COMMENT1__)\n end\n\n end\n # at line 96:6: ( 'lexer' | 'parser' | 'tree' | )\n alt_2 = 4\n case look_2 = @input.peek(1)\n when T__67 then alt_2 = 1\n when T__68 then alt_2 = 2\n when T__69 then alt_2 = 3\n when T__70 then alt_2 = 4\n else\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n\n nvae = NoViableAlternative(\"\", 2, 0)\n raise nvae\n end\n case alt_2\n when 1\n # at line 96:8: 'lexer'\n string_literal2 = match(T__67, TOKENS_FOLLOWING_T__67_IN_grammar_def_305) \n if @state.backtracking == 0\n stream_T__67.add(string_literal2)\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = LEXER_GRAMMAR \n # <-- action\n end\n\n when 2\n # at line 97:8: 'parser'\n string_literal3 = match(T__68, TOKENS_FOLLOWING_T__68_IN_grammar_def_321) \n if @state.backtracking == 0\n stream_T__68.add(string_literal3)\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = PARSER_GRAMMAR \n # <-- action\n end\n\n when 3\n # at line 98:8: 'tree'\n string_literal4 = match(T__69, TOKENS_FOLLOWING_T__69_IN_grammar_def_333) \n if @state.backtracking == 0\n stream_T__69.add(string_literal4)\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = TREE_GRAMMAR \n # <-- action\n end\n\n when 4\n # at line 99:16: \n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = COMBINED_GRAMMAR \n # <-- action\n end\n\n end\n g = match(T__70, TOKENS_FOLLOWING_T__70_IN_grammar_def_375) \n if @state.backtracking == 0\n stream_T__70.add(g)\n end\n @state.following.push(TOKENS_FOLLOWING_id_IN_grammar_def_377)\n id5 = id\n @state.following.pop\n if @state.backtracking == 0\n stream_id.add(id5.tree)\n end\n char_literal6 = match(T__71, TOKENS_FOLLOWING_T__71_IN_grammar_def_379) \n if @state.backtracking == 0\n stream_T__71.add(char_literal6)\n end\n # at line 101:25: ( options_spec )?\n alt_3 = 2\n look_3_0 = @input.peek(1)\n\n if (look_3_0 == OPTIONS) \n alt_3 = 1\n end\n case alt_3\n when 1\n # at line 101:25: options_spec\n @state.following.push(TOKENS_FOLLOWING_options_spec_IN_grammar_def_381)\n options_spec7 = options_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_options_spec.add(options_spec7.tree)\n end\n\n end\n # at line 101:39: ( tokens_spec )?\n alt_4 = 2\n look_4_0 = @input.peek(1)\n\n if (look_4_0 == TOKENS) \n alt_4 = 1\n end\n case alt_4\n when 1\n # at line 101:39: tokens_spec\n @state.following.push(TOKENS_FOLLOWING_tokens_spec_IN_grammar_def_384)\n tokens_spec8 = tokens_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_tokens_spec.add(tokens_spec8.tree)\n end\n\n end\n # at line 101:52: ( attr_scope )*\n loop do #loop 5\n alt_5 = 2\n look_5_0 = @input.peek(1)\n\n if (look_5_0 == SCOPE) \n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line 101:52: attr_scope\n @state.following.push(TOKENS_FOLLOWING_attr_scope_IN_grammar_def_387)\n attr_scope9 = attr_scope\n @state.following.pop\n if @state.backtracking == 0\n stream_attr_scope.add(attr_scope9.tree)\n end\n\n else\n break #loop 5\n 
end\n end\n # at line 101:64: ( action )*\n loop do #loop 6\n alt_6 = 2\n look_6_0 = @input.peek(1)\n\n if (look_6_0 == AT) \n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line 101:64: action\n @state.following.push(TOKENS_FOLLOWING_action_IN_grammar_def_390)\n action10 = action\n @state.following.pop\n if @state.backtracking == 0\n stream_action.add(action10.tree)\n end\n\n else\n break #loop 6\n end\n end\n # at file 102:6: ( rule )+\n match_count_7 = 0\n loop do\n alt_7 = 2\n look_7_0 = @input.peek(1)\n\n if (look_7_0 == DOC_COMMENT || look_7_0 == FRAGMENT || look_7_0 == TOKEN_REF || look_7_0 == RULE_REF || look_7_0.between?(T__75, T__77)) \n alt_7 = 1\n\n end\n case alt_7\n when 1\n # at line 102:6: rule\n @state.following.push(TOKENS_FOLLOWING_rule_IN_grammar_def_398)\n rule11 = rule\n @state.following.pop\n if @state.backtracking == 0\n stream_rule.add(rule11.tree)\n end\n\n else\n match_count_7 > 0 and break\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n\n eee = EarlyExit(7)\n\n\n raise eee\n end\n match_count_7 += 1\n end\n\n __EOF12__ = match(EOF, TOKENS_FOLLOWING_EOF_IN_grammar_def_406) \n if @state.backtracking == 0\n stream_EOF.add(__EOF12__)\n end\n # AST Rewrite\n # elements: attr_scope, id, tokens_spec, action, options_spec, rule, DOC_COMMENT\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 104:6: -> ^( id ( DOC_COMMENT )? ( options_spec )? ( tokens_spec )? ( attr_scope )* ( action )* ( rule )+ )\n # at line 104:9: ^( id ( DOC_COMMENT )? ( options_spec )? ( tokens_spec )? 
( attr_scope )* ( action )* ( rule )+ )\n root_1 = @adaptor.create_flat_list!\n root_1 = @adaptor.become_root(@adaptor.create!(@grammar_type, g) , root_1)\n\n @adaptor.add_child(root_1, stream_id.next_tree)\n # at line 105:12: ( DOC_COMMENT )?\n if stream_DOC_COMMENT.has_next?\n @adaptor.add_child(root_1, stream_DOC_COMMENT.next_node)\n\n end\n\n stream_DOC_COMMENT.reset();\n # at line 105:25: ( options_spec )?\n if stream_options_spec.has_next?\n @adaptor.add_child(root_1, stream_options_spec.next_tree)\n\n end\n\n stream_options_spec.reset();\n # at line 105:39: ( tokens_spec )?\n if stream_tokens_spec.has_next?\n @adaptor.add_child(root_1, stream_tokens_spec.next_tree)\n\n end\n\n stream_tokens_spec.reset();\n # at line 105:52: ( attr_scope )*\n while stream_attr_scope.has_next?\n @adaptor.add_child(root_1, stream_attr_scope.next_tree)\n\n end\n\n stream_attr_scope.reset();\n # at line 105:64: ( action )*\n while stream_action.has_next?\n @adaptor.add_child(root_1, stream_action.next_tree)\n\n end\n\n stream_action.reset();\n # at line 105:72: ( rule )+\n unless stream_rule.has_next?\n raise ANTLR3::RewriteEarlyExit\n end\n\n while stream_rule.has_next?\n @adaptor.add_child(root_1, stream_rule.next_tree)\n\n end\n\n stream_rule.reset\n\n @adaptor.add_child(root_0, root_1)\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing(root_0)\n @adaptor.set_token_boundaries(return_value.tree, return_value.start, return_value.stop)\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node!(@input, return_value.start, @input.look(-1), re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 1)\n\n end\n \n return return_value\n end",
"def k_accionarreglo!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 21 )\n\n\n\n type = K_ACCIONARREGLO\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 337:4: ( 'push' | 'pop' | 'remove' | 'next' | 'prev' | 'val' | 'eval' )\n # at line 337:4: ( 'push' | 'pop' | 'remove' | 'next' | 'prev' | 'val' | 'eval' )\n alt_4 = 7\n case look_4 = @input.peek( 1 )\n when 0x70 then case look_4 = @input.peek( 2 )\n when 0x75 then alt_4 = 1\n when 0x6f then alt_4 = 2\n when 0x72 then alt_4 = 5\n else\n raise NoViableAlternative( \"\", 4, 1 )\n\n end\n when 0x72 then alt_4 = 3\n when 0x6e then alt_4 = 4\n when 0x76 then alt_4 = 6\n when 0x65 then alt_4 = 7\n else\n raise NoViableAlternative( \"\", 4, 0 )\n\n end\n case alt_4\n when 1\n # at line 337:5: 'push'\n match( \"push\" )\n\n\n when 2\n # at line 337:12: 'pop'\n match( \"pop\" )\n\n\n when 3\n # at line 337:18: 'remove'\n match( \"remove\" )\n\n\n when 4\n # at line 337:27: 'next'\n match( \"next\" )\n\n\n when 5\n # at line 337:34: 'prev'\n match( \"prev\" )\n\n\n when 6\n # at line 337:41: 'val'\n match( \"val\" )\n\n\n when 7\n # at line 337:47: 'eval'\n match( \"eval\" )\n\n\n end\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 21 )\n\n\n end",
"def tokenize input\n setup_scanner input\n\n until @s.eos? do\n pos = @s.pos\n\n # leading spaces will be reflected by the column of the next token\n # the only thing we loose are trailing spaces at the end of the file\n next if @s.scan(/ +/)\n\n # note: after BULLET, LABEL, etc.,\n # indent will be the column of the next non-newline token\n\n @tokens << case\n # [CR]LF => :NEWLINE\n when @s.scan(/\\r?\\n/) then\n token = [:NEWLINE, @s.matched, *pos]\n @s.newline!\n token\n # === text => :HEADER then :TEXT\n when @s.scan(/(=+)(\\s*)/) then\n level = @s[1].length\n header = [:HEADER, level, *pos]\n\n if @s[2] =~ /^\\r?\\n/ then\n @s.unscan(@s[2])\n header\n else\n pos = @s.pos\n @s.scan(/.*/)\n @tokens << header\n [:TEXT, @s.matched.sub(/\\r$/, ''), *pos]\n end\n # --- (at least 3) and nothing else on the line => :RULE\n when @s.scan(/(-{3,}) *\\r?$/) then\n [:RULE, @s[1].length - 2, *pos]\n # * or - followed by white space and text => :BULLET\n when @s.scan(/([*-]) +(\\S)/) then\n @s.unscan(@s[2])\n [:BULLET, @s[1], *pos]\n # A. text, a. text, 12. text => :UALPHA, :LALPHA, :NUMBER\n when @s.scan(/([a-z]|\\d+)\\. +(\\S)/i) then\n # FIXME if tab(s), the column will be wrong\n # either support tabs everywhere by first expanding them to\n # spaces, or assume that they will have been replaced\n # before (and provide a check for that at least in debug\n # mode)\n list_label = @s[1]\n @s.unscan(@s[2])\n list_type =\n case list_label\n when /[a-z]/ then :LALPHA\n when /[A-Z]/ then :UALPHA\n when /\\d/ then :NUMBER\n else\n raise ParseError, \"BUG token #{list_label}\"\n end\n [list_type, list_label, *pos]\n # [text] followed by spaces or end of line => :LABEL\n when @s.scan(/\\[(.*?)\\]( +|\\r?$)/) then\n [:LABEL, @s[1], *pos]\n # text:: followed by spaces or end of line => :NOTE\n when @s.scan(/(.*?)::( +|\\r?$)/) then\n [:NOTE, @s[1], *pos]\n # >>> followed by end of line => :BLOCKQUOTE\n when @s.scan(/>>> *(\\w+)?$/) then\n [:BLOCKQUOTE, @s[1], *pos]\n # anything else: :TEXT\n else\n @s.scan(/(.*?)( )?\\r?$/)\n token = [:TEXT, @s[1], *pos]\n\n if @s[2] then\n @tokens << token\n [:BREAK, @s[2], pos[0] + @s[1].length, pos[1]]\n else\n token\n end\n end\n end\n\n self\n end",
"def grammar_def\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n return_value = GrammarDefReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n g = nil\n __DOC_COMMENT1__ = nil\n string_literal2 = nil\n string_literal3 = nil\n string_literal4 = nil\n char_literal6 = nil\n __EOF12__ = nil\n id5 = nil\n options_spec7 = nil\n tokens_spec8 = nil\n attr_scope9 = nil\n action10 = nil\n rule11 = nil\n\n tree_for_g = nil\n tree_for_DOC_COMMENT1 = nil\n tree_for_string_literal2 = nil\n tree_for_string_literal3 = nil\n tree_for_string_literal4 = nil\n tree_for_char_literal6 = nil\n tree_for_EOF12 = nil\n stream_T__68 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__68\" )\n stream_DOC_COMMENT = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token DOC_COMMENT\" )\n stream_T__69 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__69\" )\n stream_T__67 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__67\" )\n stream_T__71 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__71\" )\n stream_T__70 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__70\" )\n stream_EOF = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token EOF\" )\n stream_id = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule id\" )\n stream_tokens_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule tokens_spec\" )\n stream_rule = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule rule\" )\n stream_options_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule options_spec\" )\n stream_action = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule action\" )\n stream_attr_scope = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule attr_scope\" )\n begin\n # at line 86:9: ( DOC_COMMENT )? ( 'lexer' | 'parser' | 'tree' | ) g= 'grammar' id ';' ( options_spec )? ( tokens_spec )? 
( attr_scope )* ( action )* ( rule )+ EOF\n # at line 86:9: ( DOC_COMMENT )?\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0 == DOC_COMMENT )\n alt_1 = 1\n end\n case alt_1\n when 1\n # at line 86:9: DOC_COMMENT\n __DOC_COMMENT1__ = match( DOC_COMMENT, TOKENS_FOLLOWING_DOC_COMMENT_IN_grammar_def_290 )\n if @state.backtracking == 0\n stream_DOC_COMMENT.add( __DOC_COMMENT1__ )\n end\n\n end\n # at line 87:6: ( 'lexer' | 'parser' | 'tree' | )\n alt_2 = 4\n case look_2 = @input.peek( 1 )\n when T__67 then alt_2 = 1\n when T__68 then alt_2 = 2\n when T__69 then alt_2 = 3\n when T__70 then alt_2 = 4\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n raise NoViableAlternative( \"\", 2, 0 )\n end\n case alt_2\n when 1\n # at line 87:8: 'lexer'\n string_literal2 = match( T__67, TOKENS_FOLLOWING_T__67_IN_grammar_def_300 )\n if @state.backtracking == 0\n stream_T__67.add( string_literal2 )\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = LEXER_GRAMMAR \n # <-- action\n end\n\n when 2\n # at line 88:8: 'parser'\n string_literal3 = match( T__68, TOKENS_FOLLOWING_T__68_IN_grammar_def_316 )\n if @state.backtracking == 0\n stream_T__68.add( string_literal3 )\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = PARSER_GRAMMAR \n # <-- action\n end\n\n when 3\n # at line 89:8: 'tree'\n string_literal4 = match( T__69, TOKENS_FOLLOWING_T__69_IN_grammar_def_328 )\n if @state.backtracking == 0\n stream_T__69.add( string_literal4 )\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = TREE_GRAMMAR \n # <-- action\n end\n\n when 4\n # at line 90:16: \n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @grammar_type = COMBINED_GRAMMAR \n # <-- action\n end\n\n end\n g = match( T__70, TOKENS_FOLLOWING_T__70_IN_grammar_def_370 )\n if @state.backtracking == 0\n stream_T__70.add( g )\n end\n @state.following.push( TOKENS_FOLLOWING_id_IN_grammar_def_372 )\n id5 = id\n @state.following.pop\n if @state.backtracking == 0\n stream_id.add( id5.tree )\n end\n char_literal6 = match( T__71, TOKENS_FOLLOWING_T__71_IN_grammar_def_374 )\n if @state.backtracking == 0\n stream_T__71.add( char_literal6 )\n end\n # at line 92:25: ( options_spec )?\n alt_3 = 2\n look_3_0 = @input.peek( 1 )\n\n if ( look_3_0 == OPTIONS )\n alt_3 = 1\n end\n case alt_3\n when 1\n # at line 92:25: options_spec\n @state.following.push( TOKENS_FOLLOWING_options_spec_IN_grammar_def_376 )\n options_spec7 = options_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_options_spec.add( options_spec7.tree )\n end\n\n end\n # at line 92:39: ( tokens_spec )?\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0 == TOKENS )\n alt_4 = 1\n end\n case alt_4\n when 1\n # at line 92:39: tokens_spec\n @state.following.push( TOKENS_FOLLOWING_tokens_spec_IN_grammar_def_379 )\n tokens_spec8 = tokens_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_tokens_spec.add( tokens_spec8.tree )\n end\n\n end\n # at line 92:52: ( attr_scope )*\n while true # decision 5\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0 == SCOPE )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line 92:52: attr_scope\n @state.following.push( TOKENS_FOLLOWING_attr_scope_IN_grammar_def_382 )\n attr_scope9 = attr_scope\n @state.following.pop\n if @state.backtracking == 0\n stream_attr_scope.add( 
attr_scope9.tree )\n end\n\n else\n break # out of loop for decision 5\n end\n end # loop for decision 5\n # at line 92:64: ( action )*\n while true # decision 6\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0 == AT )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line 92:64: action\n @state.following.push( TOKENS_FOLLOWING_action_IN_grammar_def_385 )\n action10 = action\n @state.following.pop\n if @state.backtracking == 0\n stream_action.add( action10.tree )\n end\n\n else\n break # out of loop for decision 6\n end\n end # loop for decision 6\n # at file 93:6: ( rule )+\n match_count_7 = 0\n while true\n alt_7 = 2\n look_7_0 = @input.peek( 1 )\n\n if ( look_7_0 == DOC_COMMENT || look_7_0 == FRAGMENT || look_7_0 == TOKEN_REF || look_7_0 == RULE_REF || look_7_0.between?( T__75, T__77 ) )\n alt_7 = 1\n\n end\n case alt_7\n when 1\n # at line 93:6: rule\n @state.following.push( TOKENS_FOLLOWING_rule_IN_grammar_def_393 )\n rule11 = rule\n @state.following.pop\n if @state.backtracking == 0\n stream_rule.add( rule11.tree )\n end\n\n else\n match_count_7 > 0 and break\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n eee = EarlyExit(7)\n\n\n raise eee\n end\n match_count_7 += 1\n end\n\n __EOF12__ = match( EOF, TOKENS_FOLLOWING_EOF_IN_grammar_def_401 )\n if @state.backtracking == 0\n stream_EOF.add( __EOF12__ )\n end\n # AST Rewrite\n # elements: id, rule, options_spec, DOC_COMMENT, action, attr_scope, tokens_spec\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 95:6: -> ^( id ( DOC_COMMENT )? ( options_spec )? ( tokens_spec )? ( attr_scope )* ( action )* ( rule )+ )\n # at line 95:9: ^( id ( DOC_COMMENT )? ( options_spec )? ( tokens_spec )? ( attr_scope )* ( action )* ( rule )+ )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( ( @adaptor.create(@grammar_type, g) ), root_1 )\n\n @adaptor.add_child( root_1, stream_id.next_tree )\n # at line 96:12: ( DOC_COMMENT )?\n if stream_DOC_COMMENT.has_next?\n @adaptor.add_child( root_1, stream_DOC_COMMENT.next_node )\n\n end\n\n stream_DOC_COMMENT.reset();\n # at line 96:25: ( options_spec )?\n if stream_options_spec.has_next?\n @adaptor.add_child( root_1, stream_options_spec.next_tree )\n\n end\n\n stream_options_spec.reset();\n # at line 96:39: ( tokens_spec )?\n if stream_tokens_spec.has_next?\n @adaptor.add_child( root_1, stream_tokens_spec.next_tree )\n\n end\n\n stream_tokens_spec.reset();\n # at line 96:52: ( attr_scope )*\n while stream_attr_scope.has_next?\n @adaptor.add_child( root_1, stream_attr_scope.next_tree )\n\n end\n\n stream_attr_scope.reset();\n # at line 96:64: ( action )*\n while stream_action.has_next?\n @adaptor.add_child( root_1, stream_action.next_tree )\n\n end\n\n stream_action.reset();\n # at line 96:72: ( rule )+\n stream_rule.has_next? 
or raise ANTLR3::RewriteEarlyExit\n\n while stream_rule.has_next?\n @adaptor.add_child( root_1, stream_rule.next_tree )\n\n end\n stream_rule.reset\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end\n \n return return_value\n end",
"def rule\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 10)\n @rule_stack.push(Scoperule.new)\n return_value = RuleReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n modifier = nil\n arg = nil\n rt = nil\n __DOC_COMMENT39__ = nil\n string_literal40 = nil\n string_literal41 = nil\n string_literal42 = nil\n string_literal43 = nil\n char_literal45 = nil\n string_literal46 = nil\n char_literal51 = nil\n char_literal53 = nil\n id44 = nil\n throws_spec47 = nil\n options_spec48 = nil\n rule_scope_spec49 = nil\n rule_action50 = nil\n alt_list52 = nil\n exception_group54 = nil\n\n tree_for_modifier = nil\n tree_for_arg = nil\n tree_for_rt = nil\n tree_for_DOC_COMMENT39 = nil\n tree_for_string_literal40 = nil\n tree_for_string_literal41 = nil\n tree_for_string_literal42 = nil\n tree_for_string_literal43 = nil\n tree_for_char_literal45 = nil\n tree_for_string_literal46 = nil\n tree_for_char_literal51 = nil\n tree_for_char_literal53 = nil\n stream_DOC_COMMENT = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token DOC_COMMENT\")\n stream_RET = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token RET\")\n stream_T__71 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__71\")\n stream_BANG = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token BANG\")\n stream_T__76 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__76\")\n stream_T__75 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__75\")\n stream_FRAGMENT = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token FRAGMENT\")\n stream_T__78 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__78\")\n stream_ARG_ACTION = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token ARG_ACTION\")\n stream_T__77 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__77\")\n stream_id = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule id\")\n stream_throws_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule throws_spec\")\n stream_rule_action = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule rule_action\")\n stream_exception_group = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule exception_group\")\n stream_options_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule options_spec\")\n stream_rule_scope_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule rule_scope_spec\")\n stream_alt_list = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule alt_list\")\n begin\n # at line 159:4: ( DOC_COMMENT )? (modifier= ( 'protected' | 'public' | 'private' | 'fragment' ) )? id ( '!' )? (arg= ARG_ACTION )? ( 'returns' rt= ARG_ACTION )? ( throws_spec )? ( options_spec )? ( rule_scope_spec )? 
( rule_action )* ':' alt_list ';' ( exception_group )?\n # at line 159:4: ( DOC_COMMENT )?\n alt_15 = 2\n look_15_0 = @input.peek(1)\n\n if (look_15_0 == DOC_COMMENT) \n alt_15 = 1\n end\n case alt_15\n when 1\n # at line 159:4: DOC_COMMENT\n __DOC_COMMENT39__ = match(DOC_COMMENT, TOKENS_FOLLOWING_DOC_COMMENT_IN_rule_791) \n if @state.backtracking == 0\n stream_DOC_COMMENT.add(__DOC_COMMENT39__)\n end\n\n end\n # at line 160:3: (modifier= ( 'protected' | 'public' | 'private' | 'fragment' ) )?\n alt_17 = 2\n look_17_0 = @input.peek(1)\n\n if (look_17_0 == FRAGMENT || look_17_0.between?(T__75, T__77)) \n alt_17 = 1\n end\n case alt_17\n when 1\n # at line 160:5: modifier= ( 'protected' | 'public' | 'private' | 'fragment' )\n # at line 160:14: ( 'protected' | 'public' | 'private' | 'fragment' )\n alt_16 = 4\n case look_16 = @input.peek(1)\n when T__75 then alt_16 = 1\n when T__76 then alt_16 = 2\n when T__77 then alt_16 = 3\n when FRAGMENT then alt_16 = 4\n else\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n\n nvae = NoViableAlternative(\"\", 16, 0)\n raise nvae\n end\n case alt_16\n when 1\n # at line 160:15: 'protected'\n string_literal40 = match(T__75, TOKENS_FOLLOWING_T__75_IN_rule_801) \n if @state.backtracking == 0\n stream_T__75.add(string_literal40)\n end\n\n when 2\n # at line 160:27: 'public'\n string_literal41 = match(T__76, TOKENS_FOLLOWING_T__76_IN_rule_803) \n if @state.backtracking == 0\n stream_T__76.add(string_literal41)\n end\n\n when 3\n # at line 160:36: 'private'\n string_literal42 = match(T__77, TOKENS_FOLLOWING_T__77_IN_rule_805) \n if @state.backtracking == 0\n stream_T__77.add(string_literal42)\n end\n\n when 4\n # at line 160:46: 'fragment'\n string_literal43 = match(FRAGMENT, TOKENS_FOLLOWING_FRAGMENT_IN_rule_807) \n if @state.backtracking == 0\n stream_FRAGMENT.add(string_literal43)\n end\n\n end\n\n end\n @state.following.push(TOKENS_FOLLOWING_id_IN_rule_815)\n id44 = id\n @state.following.pop\n if @state.backtracking == 0\n stream_id.add(id44.tree)\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @rule_stack[-1].name = (id44.nil? ? nil : @input.to_s(id44.start,id44.stop)) \n # <-- action\n end\n # at line 162:3: ( '!' 
)?\n alt_18 = 2\n look_18_0 = @input.peek(1)\n\n if (look_18_0 == BANG) \n alt_18 = 1\n end\n case alt_18\n when 1\n # at line 162:3: '!'\n char_literal45 = match(BANG, TOKENS_FOLLOWING_BANG_IN_rule_821) \n if @state.backtracking == 0\n stream_BANG.add(char_literal45)\n end\n\n end\n # at line 163:3: (arg= ARG_ACTION )?\n alt_19 = 2\n look_19_0 = @input.peek(1)\n\n if (look_19_0 == ARG_ACTION) \n alt_19 = 1\n end\n case alt_19\n when 1\n # at line 163:5: arg= ARG_ACTION\n arg = match(ARG_ACTION, TOKENS_FOLLOWING_ARG_ACTION_IN_rule_830) \n if @state.backtracking == 0\n stream_ARG_ACTION.add(arg)\n end\n\n end\n # at line 164:3: ( 'returns' rt= ARG_ACTION )?\n alt_20 = 2\n look_20_0 = @input.peek(1)\n\n if (look_20_0 == RET) \n alt_20 = 1\n end\n case alt_20\n when 1\n # at line 164:5: 'returns' rt= ARG_ACTION\n string_literal46 = match(RET, TOKENS_FOLLOWING_RET_IN_rule_839) \n if @state.backtracking == 0\n stream_RET.add(string_literal46)\n end\n rt = match(ARG_ACTION, TOKENS_FOLLOWING_ARG_ACTION_IN_rule_843) \n if @state.backtracking == 0\n stream_ARG_ACTION.add(rt)\n end\n\n end\n # at line 165:3: ( throws_spec )?\n alt_21 = 2\n look_21_0 = @input.peek(1)\n\n if (look_21_0 == T__79) \n alt_21 = 1\n end\n case alt_21\n when 1\n # at line 165:3: throws_spec\n @state.following.push(TOKENS_FOLLOWING_throws_spec_IN_rule_851)\n throws_spec47 = throws_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_throws_spec.add(throws_spec47.tree)\n end\n\n end\n # at line 165:16: ( options_spec )?\n alt_22 = 2\n look_22_0 = @input.peek(1)\n\n if (look_22_0 == OPTIONS) \n alt_22 = 1\n end\n case alt_22\n when 1\n # at line 165:16: options_spec\n @state.following.push(TOKENS_FOLLOWING_options_spec_IN_rule_854)\n options_spec48 = options_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_options_spec.add(options_spec48.tree)\n end\n\n end\n # at line 165:30: ( rule_scope_spec )?\n alt_23 = 2\n look_23_0 = @input.peek(1)\n\n if (look_23_0 == SCOPE) \n alt_23 = 1\n end\n case alt_23\n when 1\n # at line 165:30: rule_scope_spec\n @state.following.push(TOKENS_FOLLOWING_rule_scope_spec_IN_rule_857)\n rule_scope_spec49 = rule_scope_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_rule_scope_spec.add(rule_scope_spec49.tree)\n end\n\n end\n # at line 165:47: ( rule_action )*\n loop do #loop 24\n alt_24 = 2\n look_24_0 = @input.peek(1)\n\n if (look_24_0 == AT) \n alt_24 = 1\n\n end\n case alt_24\n when 1\n # at line 165:47: rule_action\n @state.following.push(TOKENS_FOLLOWING_rule_action_IN_rule_860)\n rule_action50 = rule_action\n @state.following.pop\n if @state.backtracking == 0\n stream_rule_action.add(rule_action50.tree)\n end\n\n else\n break #loop 24\n end\n end\n char_literal51 = match(T__78, TOKENS_FOLLOWING_T__78_IN_rule_865) \n if @state.backtracking == 0\n stream_T__78.add(char_literal51)\n end\n @state.following.push(TOKENS_FOLLOWING_alt_list_IN_rule_867)\n alt_list52 = alt_list\n @state.following.pop\n if @state.backtracking == 0\n stream_alt_list.add(alt_list52.tree)\n end\n char_literal53 = match(T__71, TOKENS_FOLLOWING_T__71_IN_rule_869) \n if @state.backtracking == 0\n stream_T__71.add(char_literal53)\n end\n # at line 167:3: ( exception_group )?\n alt_25 = 2\n look_25_0 = @input.peek(1)\n\n if (look_25_0.between?(T__84, T__85)) \n alt_25 = 1\n end\n case alt_25\n when 1\n # at line 167:3: exception_group\n @state.following.push(TOKENS_FOLLOWING_exception_group_IN_rule_873)\n exception_group54 = exception_group\n @state.following.pop\n if 
@state.backtracking == 0\n stream_exception_group.add(exception_group54.tree)\n end\n\n end\n # AST Rewrite\n # elements: options_spec, exception_group, RET, throws_spec, rule_action, rt, arg, id, alt_list, rule_scope_spec\n # token labels: arg, rt\n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_arg = token_stream(\"token arg\", arg)\n stream_rt = token_stream(\"token rt\", rt)\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 168:6: -> ^( RULE id ( ^( ARG[$arg] $arg) )? ( ^( 'returns' $rt) )? ( throws_spec )? ( options_spec )? ( rule_scope_spec )? ( rule_action )* alt_list ( exception_group )? EOR[\\\"EOR\\\"] )\n # at line 168:9: ^( RULE id ( ^( ARG[$arg] $arg) )? ( ^( 'returns' $rt) )? ( throws_spec )? ( options_spec )? ( rule_scope_spec )? ( rule_action )* alt_list ( exception_group )? EOR[\\\"EOR\\\"] )\n root_1 = @adaptor.create_flat_list!\n root_1 = @adaptor.become_root(@adaptor.create_from_type!(RULE, \"RULE\"), root_1)\n\n @adaptor.add_child(root_1, stream_id.next_tree)\n @adaptor.add_child(root_1, modifier ? @adaptor.create!(modifier) : nil)\n # at line 168:68: ( ^( ARG[$arg] $arg) )?\n if stream_arg.has_next?\n # at line 168:68: ^( ARG[$arg] $arg)\n root_2 = @adaptor.create_flat_list!\n root_2 = @adaptor.become_root(@adaptor.create!(ARG, arg), root_2)\n\n @adaptor.add_child(root_2, stream_arg.next_node)\n\n @adaptor.add_child(root_1, root_2)\n\n end\n\n stream_arg.reset();\n # at line 168:87: ( ^( 'returns' $rt) )?\n if stream_RET.has_next? || stream_rt.has_next?\n # at line 168:87: ^( 'returns' $rt)\n root_2 = @adaptor.create_flat_list!\n root_2 = @adaptor.become_root(stream_RET.next_node, root_2)\n\n @adaptor.add_child(root_2, stream_rt.next_node)\n\n @adaptor.add_child(root_1, root_2)\n\n end\n\n stream_RET.reset();\n stream_rt.reset();\n # at line 169:9: ( throws_spec )?\n if stream_throws_spec.has_next?\n @adaptor.add_child(root_1, stream_throws_spec.next_tree)\n\n end\n\n stream_throws_spec.reset();\n # at line 169:22: ( options_spec )?\n if stream_options_spec.has_next?\n @adaptor.add_child(root_1, stream_options_spec.next_tree)\n\n end\n\n stream_options_spec.reset();\n # at line 169:36: ( rule_scope_spec )?\n if stream_rule_scope_spec.has_next?\n @adaptor.add_child(root_1, stream_rule_scope_spec.next_tree)\n\n end\n\n stream_rule_scope_spec.reset();\n # at line 169:53: ( rule_action )*\n while stream_rule_action.has_next?\n @adaptor.add_child(root_1, stream_rule_action.next_tree)\n\n end\n\n stream_rule_action.reset();\n @adaptor.add_child(root_1, stream_alt_list.next_tree)\n # at line 171:9: ( exception_group )?\n if stream_exception_group.has_next?\n @adaptor.add_child(root_1, stream_exception_group.next_tree)\n\n end\n\n stream_exception_group.reset();\n @adaptor.add_child(root_1, @adaptor.create!(EOR, \"EOR\"))\n\n @adaptor.add_child(root_0, root_1)\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing(root_0)\n @adaptor.set_token_boundaries(return_value.tree, return_value.start, return_value.stop)\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node!(@input, return_value.start, 
@input.look(-1), re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 10)\n @rule_stack.pop\n\n end\n \n return return_value\n end",
"def token!\n # at line 1:8: ( AMP | AMP_ASGN | GEQ | AND | GET | GREATER | HAT | HAT_ASGN | QMARK | ARROW | RBRACE | ASGN | IF | RBRACK | IN | BL_END | INCR | BREAK | INSTANCEOF | RETURN | IS_DEFINED | RPAREN | CASE | RSHIFT | CATCH | RSHIFT3 | COLON | LBRACE | RSHIFT3_ASGN | COMMA | LBRACK | RSHIFT_ASGN | LEQ | SEMI | CONST | LESS | SET | CONTINUE | LET | DECR | LPAREN | DEFAULT | LSHIFT | STAR | DELETE | LSHIFT_ASGN | STAR_ASGN | MACRO | SWITCH | MINUS | THIS | DO | MINUS_ASGN | THROW | DOT | TILDE | TRUE | NEQ | TRY | NEQQ | TYPEOF | NEW | EACH | NOT | UNDEFINED | ELSE | NULL | UNLESS | EQ | UNTIL | EQQ | OR | FALSE | OR_ASGN | VAR | FINALLY | VOID | FOR | PIPE | WHILE | PIPE_ASGN | WITH | FUNCTION | PLUS | YIELD | PLUS_ASGN | IS_UNDEFINED | POUND | DEF | CLASS_DEF | OBJECT_DEF | T__148 | T__149 | T__150 | MOD | SLASH | DOC | STRING | NUMBER | NEWLINE | IVAR | ID | WS | EJS )\n alt_72 = 104\n alt_72 = @dfa72.predict( @input )\n case alt_72\n when 1\n # at line 1:10: AMP\n amp!\n\n when 2\n # at line 1:14: AMP_ASGN\n amp_asgn!\n\n when 3\n # at line 1:23: GEQ\n geq!\n\n when 4\n # at line 1:27: AND\n and!\n\n when 5\n # at line 1:31: GET\n get!\n\n when 6\n # at line 1:35: GREATER\n greater!\n\n when 7\n # at line 1:43: HAT\n hat!\n\n when 8\n # at line 1:47: HAT_ASGN\n hat_asgn!\n\n when 9\n # at line 1:56: QMARK\n qmark!\n\n when 10\n # at line 1:62: ARROW\n arrow!\n\n when 11\n # at line 1:68: RBRACE\n rbrace!\n\n when 12\n # at line 1:75: ASGN\n asgn!\n\n when 13\n # at line 1:80: IF\n if!\n\n when 14\n # at line 1:83: RBRACK\n rbrack!\n\n when 15\n # at line 1:90: IN\n in!\n\n when 16\n # at line 1:93: BL_END\n bl_end!\n\n when 17\n # at line 1:100: INCR\n incr!\n\n when 18\n # at line 1:105: BREAK\n break!\n\n when 19\n # at line 1:111: INSTANCEOF\n instanceof!\n\n when 20\n # at line 1:122: RETURN\n return!\n\n when 21\n # at line 1:129: IS_DEFINED\n is_defined!\n\n when 22\n # at line 1:140: RPAREN\n rparen!\n\n when 23\n # at line 1:147: CASE\n case!\n\n when 24\n # at line 1:152: RSHIFT\n rshift!\n\n when 25\n # at line 1:159: CATCH\n catch!\n\n when 26\n # at line 1:165: RSHIFT3\n rshift_3!\n\n when 27\n # at line 1:173: COLON\n colon!\n\n when 28\n # at line 1:179: LBRACE\n lbrace!\n\n when 29\n # at line 1:186: RSHIFT3_ASGN\n rshift_3_asgn!\n\n when 30\n # at line 1:199: COMMA\n comma!\n\n when 31\n # at line 1:205: LBRACK\n lbrack!\n\n when 32\n # at line 1:212: RSHIFT_ASGN\n rshift_asgn!\n\n when 33\n # at line 1:224: LEQ\n leq!\n\n when 34\n # at line 1:228: SEMI\n semi!\n\n when 35\n # at line 1:233: CONST\n const!\n\n when 36\n # at line 1:239: LESS\n less!\n\n when 37\n # at line 1:244: SET\n set!\n\n when 38\n # at line 1:248: CONTINUE\n continue!\n\n when 39\n # at line 1:257: LET\n let!\n\n when 40\n # at line 1:261: DECR\n decr!\n\n when 41\n # at line 1:266: LPAREN\n lparen!\n\n when 42\n # at line 1:273: DEFAULT\n default!\n\n when 43\n # at line 1:281: LSHIFT\n lshift!\n\n when 44\n # at line 1:288: STAR\n star!\n\n when 45\n # at line 1:293: DELETE\n delete!\n\n when 46\n # at line 1:300: LSHIFT_ASGN\n lshift_asgn!\n\n when 47\n # at line 1:312: STAR_ASGN\n star_asgn!\n\n when 48\n # at line 1:322: MACRO\n macro!\n\n when 49\n # at line 1:328: SWITCH\n switch!\n\n when 50\n # at line 1:335: MINUS\n minus!\n\n when 51\n # at line 1:341: THIS\n this!\n\n when 52\n # at line 1:346: DO\n do!\n\n when 53\n # at line 1:349: MINUS_ASGN\n minus_asgn!\n\n when 54\n # at line 1:360: THROW\n throw!\n\n when 55\n # at line 1:366: DOT\n dot!\n\n when 56\n # at line 1:370: 
TILDE\n tilde!\n\n when 57\n # at line 1:376: TRUE\n true!\n\n when 58\n # at line 1:381: NEQ\n neq!\n\n when 59\n # at line 1:385: TRY\n try!\n\n when 60\n # at line 1:389: NEQQ\n neqq!\n\n when 61\n # at line 1:394: TYPEOF\n typeof!\n\n when 62\n # at line 1:401: NEW\n new!\n\n when 63\n # at line 1:405: EACH\n each!\n\n when 64\n # at line 1:410: NOT\n not!\n\n when 65\n # at line 1:414: UNDEFINED\n undefined!\n\n when 66\n # at line 1:424: ELSE\n else!\n\n when 67\n # at line 1:429: NULL\n null!\n\n when 68\n # at line 1:434: UNLESS\n unless!\n\n when 69\n # at line 1:441: EQ\n eq!\n\n when 70\n # at line 1:444: UNTIL\n until!\n\n when 71\n # at line 1:450: EQQ\n eqq!\n\n when 72\n # at line 1:454: OR\n or!\n\n when 73\n # at line 1:457: FALSE\n false!\n\n when 74\n # at line 1:463: OR_ASGN\n or_asgn!\n\n when 75\n # at line 1:471: VAR\n var!\n\n when 76\n # at line 1:475: FINALLY\n finally!\n\n when 77\n # at line 1:483: VOID\n void!\n\n when 78\n # at line 1:488: FOR\n for!\n\n when 79\n # at line 1:492: PIPE\n pipe!\n\n when 80\n # at line 1:497: WHILE\n while!\n\n when 81\n # at line 1:503: PIPE_ASGN\n pipe_asgn!\n\n when 82\n # at line 1:513: WITH\n with!\n\n when 83\n # at line 1:518: FUNCTION\n function!\n\n when 84\n # at line 1:527: PLUS\n plus!\n\n when 85\n # at line 1:532: YIELD\n yield!\n\n when 86\n # at line 1:538: PLUS_ASGN\n plus_asgn!\n\n when 87\n # at line 1:548: IS_UNDEFINED\n is_undefined!\n\n when 88\n # at line 1:561: POUND\n pound!\n\n when 89\n # at line 1:567: DEF\n def!\n\n when 90\n # at line 1:571: CLASS_DEF\n class_def!\n\n when 91\n # at line 1:581: OBJECT_DEF\n object_def!\n\n when 92\n # at line 1:592: T__148\n t__148!\n\n when 93\n # at line 1:599: T__149\n t__149!\n\n when 94\n # at line 1:606: T__150\n t__150!\n\n when 95\n # at line 1:613: MOD\n mod!\n\n when 96\n # at line 1:617: SLASH\n slash!\n\n when 97\n # at line 1:623: DOC\n doc!\n\n when 98\n # at line 1:627: STRING\n string!\n\n when 99\n # at line 1:634: NUMBER\n number!\n\n when 100\n # at line 1:641: NEWLINE\n newline!\n\n when 101\n # at line 1:649: IVAR\n ivar!\n\n when 102\n # at line 1:654: ID\n id!\n\n when 103\n # at line 1:657: WS\n ws!\n\n when 104\n # at line 1:660: EJS\n ejs!\n\n end\n end",
"def into\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 22 )\n into_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return \n end\n # at line 301:7: ( 'I' | 'i' ) ( 'N' | 'n' ) ( 'T' | 't' ) ( 'O' | 'o' )\n if @input.peek( 1 ).between?( T__24, T__25 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__34, T__35 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__16, T__17 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__18, T__19 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 22 )\n memoize( __method__, into_start_index, success ) if @state.backtracking > 0\n\n end\n \n return \n end",
"def lex_start=(_arg0); end",
"def lex_start=(_arg0); end",
"def lex_start=(_arg0); end",
"def lex_start=(_arg0); end",
"def rule\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n @rule_stack.push( @@rule.new )\n return_value = RuleReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n modifier = nil\n arg = nil\n rt = nil\n __DOC_COMMENT39__ = nil\n string_literal40 = nil\n string_literal41 = nil\n string_literal42 = nil\n string_literal43 = nil\n char_literal45 = nil\n string_literal46 = nil\n char_literal51 = nil\n char_literal53 = nil\n id44 = nil\n throws_spec47 = nil\n options_spec48 = nil\n rule_scope_spec49 = nil\n rule_action50 = nil\n alt_list52 = nil\n exception_group54 = nil\n\n tree_for_modifier = nil\n tree_for_arg = nil\n tree_for_rt = nil\n tree_for_DOC_COMMENT39 = nil\n tree_for_string_literal40 = nil\n tree_for_string_literal41 = nil\n tree_for_string_literal42 = nil\n tree_for_string_literal43 = nil\n tree_for_char_literal45 = nil\n tree_for_string_literal46 = nil\n tree_for_char_literal51 = nil\n tree_for_char_literal53 = nil\n stream_DOC_COMMENT = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token DOC_COMMENT\" )\n stream_RET = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token RET\" )\n stream_T__71 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__71\" )\n stream_BANG = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token BANG\" )\n stream_T__76 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__76\" )\n stream_T__75 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__75\" )\n stream_FRAGMENT = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token FRAGMENT\" )\n stream_T__78 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__78\" )\n stream_ARG_ACTION = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token ARG_ACTION\" )\n stream_T__77 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__77\" )\n stream_id = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule id\" )\n stream_throws_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule throws_spec\" )\n stream_rule_action = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule rule_action\" )\n stream_exception_group = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule exception_group\" )\n stream_options_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule options_spec\" )\n stream_rule_scope_spec = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule rule_scope_spec\" )\n stream_alt_list = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule alt_list\" )\n begin\n # at line 150:4: ( DOC_COMMENT )? (modifier= ( 'protected' | 'public' | 'private' | 'fragment' ) )? id ( '!' )? (arg= ARG_ACTION )? ( 'returns' rt= ARG_ACTION )? ( throws_spec )? ( options_spec )? ( rule_scope_spec )? 
( rule_action )* ':' alt_list ';' ( exception_group )?\n # at line 150:4: ( DOC_COMMENT )?\n alt_15 = 2\n look_15_0 = @input.peek( 1 )\n\n if ( look_15_0 == DOC_COMMENT )\n alt_15 = 1\n end\n case alt_15\n when 1\n # at line 150:4: DOC_COMMENT\n __DOC_COMMENT39__ = match( DOC_COMMENT, TOKENS_FOLLOWING_DOC_COMMENT_IN_rule_786 )\n if @state.backtracking == 0\n stream_DOC_COMMENT.add( __DOC_COMMENT39__ )\n end\n\n end\n # at line 151:3: (modifier= ( 'protected' | 'public' | 'private' | 'fragment' ) )?\n alt_17 = 2\n look_17_0 = @input.peek( 1 )\n\n if ( look_17_0 == FRAGMENT || look_17_0.between?( T__75, T__77 ) )\n alt_17 = 1\n end\n case alt_17\n when 1\n # at line 151:5: modifier= ( 'protected' | 'public' | 'private' | 'fragment' )\n # at line 151:14: ( 'protected' | 'public' | 'private' | 'fragment' )\n alt_16 = 4\n case look_16 = @input.peek( 1 )\n when T__75 then alt_16 = 1\n when T__76 then alt_16 = 2\n when T__77 then alt_16 = 3\n when FRAGMENT then alt_16 = 4\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n raise NoViableAlternative( \"\", 16, 0 )\n end\n case alt_16\n when 1\n # at line 151:15: 'protected'\n string_literal40 = match( T__75, TOKENS_FOLLOWING_T__75_IN_rule_796 )\n if @state.backtracking == 0\n stream_T__75.add( string_literal40 )\n end\n\n when 2\n # at line 151:27: 'public'\n string_literal41 = match( T__76, TOKENS_FOLLOWING_T__76_IN_rule_798 )\n if @state.backtracking == 0\n stream_T__76.add( string_literal41 )\n end\n\n when 3\n # at line 151:36: 'private'\n string_literal42 = match( T__77, TOKENS_FOLLOWING_T__77_IN_rule_800 )\n if @state.backtracking == 0\n stream_T__77.add( string_literal42 )\n end\n\n when 4\n # at line 151:46: 'fragment'\n string_literal43 = match( FRAGMENT, TOKENS_FOLLOWING_FRAGMENT_IN_rule_802 )\n if @state.backtracking == 0\n stream_FRAGMENT.add( string_literal43 )\n end\n\n end\n\n end\n @state.following.push( TOKENS_FOLLOWING_id_IN_rule_810 )\n id44 = id\n @state.following.pop\n if @state.backtracking == 0\n stream_id.add( id44.tree )\n end\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n @rule_stack.last.name = ( id44 && @input.to_s( id44.start, id44.stop ) ) \n # <-- action\n end\n # at line 153:3: ( '!' 
)?\n alt_18 = 2\n look_18_0 = @input.peek( 1 )\n\n if ( look_18_0 == BANG )\n alt_18 = 1\n end\n case alt_18\n when 1\n # at line 153:3: '!'\n char_literal45 = match( BANG, TOKENS_FOLLOWING_BANG_IN_rule_816 )\n if @state.backtracking == 0\n stream_BANG.add( char_literal45 )\n end\n\n end\n # at line 154:3: (arg= ARG_ACTION )?\n alt_19 = 2\n look_19_0 = @input.peek( 1 )\n\n if ( look_19_0 == ARG_ACTION )\n alt_19 = 1\n end\n case alt_19\n when 1\n # at line 154:5: arg= ARG_ACTION\n arg = match( ARG_ACTION, TOKENS_FOLLOWING_ARG_ACTION_IN_rule_825 )\n if @state.backtracking == 0\n stream_ARG_ACTION.add( arg )\n end\n\n end\n # at line 155:3: ( 'returns' rt= ARG_ACTION )?\n alt_20 = 2\n look_20_0 = @input.peek( 1 )\n\n if ( look_20_0 == RET )\n alt_20 = 1\n end\n case alt_20\n when 1\n # at line 155:5: 'returns' rt= ARG_ACTION\n string_literal46 = match( RET, TOKENS_FOLLOWING_RET_IN_rule_834 )\n if @state.backtracking == 0\n stream_RET.add( string_literal46 )\n end\n rt = match( ARG_ACTION, TOKENS_FOLLOWING_ARG_ACTION_IN_rule_838 )\n if @state.backtracking == 0\n stream_ARG_ACTION.add( rt )\n end\n\n end\n # at line 156:3: ( throws_spec )?\n alt_21 = 2\n look_21_0 = @input.peek( 1 )\n\n if ( look_21_0 == T__79 )\n alt_21 = 1\n end\n case alt_21\n when 1\n # at line 156:3: throws_spec\n @state.following.push( TOKENS_FOLLOWING_throws_spec_IN_rule_846 )\n throws_spec47 = throws_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_throws_spec.add( throws_spec47.tree )\n end\n\n end\n # at line 156:16: ( options_spec )?\n alt_22 = 2\n look_22_0 = @input.peek( 1 )\n\n if ( look_22_0 == OPTIONS )\n alt_22 = 1\n end\n case alt_22\n when 1\n # at line 156:16: options_spec\n @state.following.push( TOKENS_FOLLOWING_options_spec_IN_rule_849 )\n options_spec48 = options_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_options_spec.add( options_spec48.tree )\n end\n\n end\n # at line 156:30: ( rule_scope_spec )?\n alt_23 = 2\n look_23_0 = @input.peek( 1 )\n\n if ( look_23_0 == SCOPE )\n alt_23 = 1\n end\n case alt_23\n when 1\n # at line 156:30: rule_scope_spec\n @state.following.push( TOKENS_FOLLOWING_rule_scope_spec_IN_rule_852 )\n rule_scope_spec49 = rule_scope_spec\n @state.following.pop\n if @state.backtracking == 0\n stream_rule_scope_spec.add( rule_scope_spec49.tree )\n end\n\n end\n # at line 156:47: ( rule_action )*\n while true # decision 24\n alt_24 = 2\n look_24_0 = @input.peek( 1 )\n\n if ( look_24_0 == AT )\n alt_24 = 1\n\n end\n case alt_24\n when 1\n # at line 156:47: rule_action\n @state.following.push( TOKENS_FOLLOWING_rule_action_IN_rule_855 )\n rule_action50 = rule_action\n @state.following.pop\n if @state.backtracking == 0\n stream_rule_action.add( rule_action50.tree )\n end\n\n else\n break # out of loop for decision 24\n end\n end # loop for decision 24\n char_literal51 = match( T__78, TOKENS_FOLLOWING_T__78_IN_rule_860 )\n if @state.backtracking == 0\n stream_T__78.add( char_literal51 )\n end\n @state.following.push( TOKENS_FOLLOWING_alt_list_IN_rule_862 )\n alt_list52 = alt_list\n @state.following.pop\n if @state.backtracking == 0\n stream_alt_list.add( alt_list52.tree )\n end\n char_literal53 = match( T__71, TOKENS_FOLLOWING_T__71_IN_rule_864 )\n if @state.backtracking == 0\n stream_T__71.add( char_literal53 )\n end\n # at line 158:3: ( exception_group )?\n alt_25 = 2\n look_25_0 = @input.peek( 1 )\n\n if ( look_25_0.between?( T__84, T__85 ) )\n alt_25 = 1\n end\n case alt_25\n when 1\n # at line 158:3: exception_group\n @state.following.push( 
TOKENS_FOLLOWING_exception_group_IN_rule_868 )\n exception_group54 = exception_group\n @state.following.pop\n if @state.backtracking == 0\n stream_exception_group.add( exception_group54.tree )\n end\n\n end\n # AST Rewrite\n # elements: id, arg, rule_action, throws_spec, rule_scope_spec, exception_group, options_spec, rt, RET, alt_list\n # token labels: arg, rt\n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_arg = token_stream( \"token arg\", arg )\n stream_rt = token_stream( \"token rt\", rt )\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 159:6: -> ^( RULE id ( ^( ARG[$arg] $arg) )? ( ^( 'returns' $rt) )? ( throws_spec )? ( options_spec )? ( rule_scope_spec )? ( rule_action )* alt_list ( exception_group )? EOR[\\\"EOR\\\"] )\n # at line 159:9: ^( RULE id ( ^( ARG[$arg] $arg) )? ( ^( 'returns' $rt) )? ( throws_spec )? ( options_spec )? ( rule_scope_spec )? ( rule_action )* alt_list ( exception_group )? EOR[\\\"EOR\\\"] )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( @adaptor.create_from_type( RULE, \"RULE\" ), root_1 )\n\n @adaptor.add_child( root_1, stream_id.next_tree )\n @adaptor.add_child( root_1, ( modifier ? @adaptor.create(modifier) : nil ) )\n # at line 159:67: ( ^( ARG[$arg] $arg) )?\n if stream_arg.has_next?\n # at line 159:67: ^( ARG[$arg] $arg)\n root_2 = @adaptor.create_flat_list\n root_2 = @adaptor.become_root( @adaptor.create( ARG, arg ), root_2 )\n\n @adaptor.add_child( root_2, stream_arg.next_node )\n\n @adaptor.add_child( root_1, root_2 )\n\n end\n\n stream_arg.reset();\n # at line 159:86: ( ^( 'returns' $rt) )?\n if stream_rt.has_next? 
|| stream_RET.has_next?\n # at line 159:86: ^( 'returns' $rt)\n root_2 = @adaptor.create_flat_list\n root_2 = @adaptor.become_root( stream_RET.next_node, root_2 )\n\n @adaptor.add_child( root_2, stream_rt.next_node )\n\n @adaptor.add_child( root_1, root_2 )\n\n end\n\n stream_rt.reset();\n stream_RET.reset();\n # at line 160:9: ( throws_spec )?\n if stream_throws_spec.has_next?\n @adaptor.add_child( root_1, stream_throws_spec.next_tree )\n\n end\n\n stream_throws_spec.reset();\n # at line 160:22: ( options_spec )?\n if stream_options_spec.has_next?\n @adaptor.add_child( root_1, stream_options_spec.next_tree )\n\n end\n\n stream_options_spec.reset();\n # at line 160:36: ( rule_scope_spec )?\n if stream_rule_scope_spec.has_next?\n @adaptor.add_child( root_1, stream_rule_scope_spec.next_tree )\n\n end\n\n stream_rule_scope_spec.reset();\n # at line 160:53: ( rule_action )*\n while stream_rule_action.has_next?\n @adaptor.add_child( root_1, stream_rule_action.next_tree )\n\n end\n\n stream_rule_action.reset();\n @adaptor.add_child( root_1, stream_alt_list.next_tree )\n # at line 162:9: ( exception_group )?\n if stream_exception_group.has_next?\n @adaptor.add_child( root_1, stream_exception_group.next_tree )\n\n end\n\n stream_exception_group.reset();\n @adaptor.add_child( root_1, @adaptor.create( EOR, \"EOR\" ) )\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n @rule_stack.pop\n\n end\n \n return return_value\n end",
"def token_spec\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 3)\n return_value = TokenSpecReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n lit = nil\n __TOKEN_REF16__ = nil\n char_literal17 = nil\n char_literal18 = nil\n\n tree_for_lit = nil\n tree_for_TOKEN_REF16 = nil\n tree_for_char_literal17 = nil\n tree_for_char_literal18 = nil\n stream_STRING_LITERAL = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token STRING_LITERAL\")\n stream_T__71 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__71\")\n stream_CHAR_LITERAL = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token CHAR_LITERAL\")\n stream_LABEL_ASSIGN = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token LABEL_ASSIGN\")\n stream_TOKEN_REF = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token TOKEN_REF\")\n\n begin\n # at line 114:4: TOKEN_REF ( '=' (lit= STRING_LITERAL | lit= CHAR_LITERAL ) -> ^( '=' TOKEN_REF $lit) | -> TOKEN_REF ) ';'\n __TOKEN_REF16__ = match(TOKEN_REF, TOKENS_FOLLOWING_TOKEN_REF_IN_token_spec_492) \n if @state.backtracking == 0\n stream_TOKEN_REF.add(__TOKEN_REF16__)\n end\n # at line 115:3: ( '=' (lit= STRING_LITERAL | lit= CHAR_LITERAL ) -> ^( '=' TOKEN_REF $lit) | -> TOKEN_REF )\n alt_10 = 2\n look_10_0 = @input.peek(1)\n\n if (look_10_0 == LABEL_ASSIGN) \n alt_10 = 1\n elsif (look_10_0 == T__71) \n alt_10 = 2\n else\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n nvae = NoViableAlternative(\"\", 10, 0)\n raise nvae\n end\n case alt_10\n when 1\n # at line 115:5: '=' (lit= STRING_LITERAL | lit= CHAR_LITERAL )\n char_literal17 = match(LABEL_ASSIGN, TOKENS_FOLLOWING_LABEL_ASSIGN_IN_token_spec_498) \n if @state.backtracking == 0\n stream_LABEL_ASSIGN.add(char_literal17)\n end\n # at line 115:9: (lit= STRING_LITERAL | lit= CHAR_LITERAL )\n alt_9 = 2\n look_9_0 = @input.peek(1)\n\n if (look_9_0 == STRING_LITERAL) \n alt_9 = 1\n elsif (look_9_0 == CHAR_LITERAL) \n alt_9 = 2\n else\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n nvae = NoViableAlternative(\"\", 9, 0)\n raise nvae\n end\n case alt_9\n when 1\n # at line 115:10: lit= STRING_LITERAL\n lit = match(STRING_LITERAL, TOKENS_FOLLOWING_STRING_LITERAL_IN_token_spec_503) \n if @state.backtracking == 0\n stream_STRING_LITERAL.add(lit)\n end\n\n when 2\n # at line 115:29: lit= CHAR_LITERAL\n lit = match(CHAR_LITERAL, TOKENS_FOLLOWING_CHAR_LITERAL_IN_token_spec_507) \n if @state.backtracking == 0\n stream_CHAR_LITERAL.add(lit)\n end\n\n end\n # AST Rewrite\n # elements: lit, LABEL_ASSIGN, TOKEN_REF\n # token labels: lit\n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_lit = token_stream(\"token lit\", lit)\n stream_return_value = return_value ? 
subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 115:47: -> ^( '=' TOKEN_REF $lit)\n # at line 115:50: ^( '=' TOKEN_REF $lit)\n root_1 = @adaptor.create_flat_list!\n root_1 = @adaptor.become_root(stream_LABEL_ASSIGN.next_node, root_1)\n\n @adaptor.add_child(root_1, stream_TOKEN_REF.next_node)\n @adaptor.add_child(root_1, stream_lit.next_node)\n\n @adaptor.add_child(root_0, root_1)\n\n\n\n return_value.tree = root_0\n\n end\n when 2\n # at line 116:16: \n # AST Rewrite\n # elements: TOKEN_REF\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 116:16: -> TOKEN_REF\n @adaptor.add_child(root_0, stream_TOKEN_REF.next_node)\n\n\n\n return_value.tree = root_0\n\n end\n end\n char_literal18 = match(T__71, TOKENS_FOLLOWING_T__71_IN_token_spec_546) \n if @state.backtracking == 0\n stream_T__71.add(char_literal18)\n end\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing(root_0)\n @adaptor.set_token_boundaries(return_value.tree, return_value.start, return_value.stop)\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node!(@input, return_value.start, @input.look(-1), re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 3)\n\n end\n \n return return_value\n end",
"def from\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 27 )\n from_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return \n end\n # at line 306:7: ( 'F' | 'f' ) ( 'R' | 'r' ) ( 'O' | 'o' ) ( 'M' | 'm' )\n if @input.peek( 1 ).between?( T__30, T__31 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__20, T__21 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__18, T__19 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__40, T__41 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 27 )\n memoize( __method__, from_start_index, success ) if @state.backtracking > 0\n\n end\n \n return \n end",
"def scan_tokens\n i = 0\n loop do\n break unless @tokens[i]\n move = yield(@tokens[i - 1], @tokens[i], @tokens[i + 1], i)\n i += move\n end\n end",
"def for_\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 44 )\n for__start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return \n end\n # at line 331:7: ( 'F' | 'f' ) ( 'O' | 'o' ) ( 'R' | 'r' )\n if @input.peek( 1 ).between?( T__30, T__31 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__18, T__19 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__20, T__21 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 44 )\n memoize( __method__, for__start_index, success ) if @state.backtracking > 0\n\n end\n \n return \n end",
"def consume\n cur = @current_token\n next_token\n return cur\n end",
"def lex\n @index += 1\n while lexer.tokens[@index] === :COMMENT\n @index += 1\n end\n lexer.tokens[@index] or unexpected_error(:EOF)\n end",
"def scan\n #Puppet.debug(\"entering scan\")\n lex_error \"Internal Error: No string or file given to lexer to process.\" unless @scanner\n\n # Skip any initial whitespace.\n skip\n\n until token_queue.empty? and @scanner.eos? do\n offset = @scanner.pos\n matched_token, value = find_token\n end_offset = @scanner.pos\n\n # error out if we didn't match anything at all\n lex_error \"Could not match #{@scanner.rest[/^(\\S+|\\s+|.*)/]}\" unless matched_token\n\n newline = matched_token.name == :RETURN\n\n lexing_context[:start_of_line] = newline\n lexing_context[:offset] = offset\n lexing_context[:end_offset] = end_offset\n\n final_token, token_value = munge_token(matched_token, value)\n # update end position since munging may have moved the end offset\n lexing_context[:end_offset] = @scanner.pos\n\n unless final_token\n skip\n next\n end\n\n lexing_context[:after] = final_token.name unless newline\n if final_token.name == :DQPRE\n lexing_context[:interpolation_stack] << lexing_context[:brace_count]\n elsif final_token.name == :DQPOST\n lexing_context[:interpolation_stack].pop\n end\n\n value = token_value[:value]\n\n if match = @@pairs[value] and final_token.name != :DQUOTE and final_token.name != :SQUOTE\n @expected << match\n elsif exp = @expected[-1] and exp == value and final_token.name != :DQUOTE and final_token.name != :SQUOTE\n @expected.pop\n end\n\n yield [final_token.name, token_value]\n\n if @previous_token\n namestack(value) if @previous_token.name == :CLASS and value != '{'\n\n if @previous_token.name == :DEFINE\n if indefine?\n msg = \"Cannot nest definition #{value} inside #{@indefine}\"\n self.indefine = false\n raise Puppet::ParseError, msg\n end\n\n @indefine = value\n end\n end\n @previous_token = final_token\n skip\n end\n # Cannot reset @scanner to nil here - it is needed to answer questions about context after\n # completed parsing.\n # Seems meaningless to do this. Everything will be gc anyway.\n #@scanner = nil\n\n # This indicates that we're done parsing.\n yield [false,false]\n end",
"def consume!\n empty_line ||\n name_token ||\n comment_token ||\n whitespace_token ||\n line_token ||\n heredoc_token ||\n string_token ||\n number_token ||\n regex_token ||\n literal_token\n end",
"def token!\n # at line 1:8: ( T__8 | T__9 | T__10 | T__11 | T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | T__30 | T__31 | T__32 | T__33 | T__34 | T__35 | T__36 | T__37 | T__38 | T__39 | T__40 | T__41 | T__42 | T__43 | DIGIT | WS | EOL | LETTER )\n alt_3 = 40\n alt_3 = @dfa3.predict( @input )\n case alt_3\n when 1\n # at line 1:10: T__8\n t__8!\n\n when 2\n # at line 1:15: T__9\n t__9!\n\n when 3\n # at line 1:20: T__10\n t__10!\n\n when 4\n # at line 1:26: T__11\n t__11!\n\n when 5\n # at line 1:32: T__12\n t__12!\n\n when 6\n # at line 1:38: T__13\n t__13!\n\n when 7\n # at line 1:44: T__14\n t__14!\n\n when 8\n # at line 1:50: T__15\n t__15!\n\n when 9\n # at line 1:56: T__16\n t__16!\n\n when 10\n # at line 1:62: T__17\n t__17!\n\n when 11\n # at line 1:68: T__18\n t__18!\n\n when 12\n # at line 1:74: T__19\n t__19!\n\n when 13\n # at line 1:80: T__20\n t__20!\n\n when 14\n # at line 1:86: T__21\n t__21!\n\n when 15\n # at line 1:92: T__22\n t__22!\n\n when 16\n # at line 1:98: T__23\n t__23!\n\n when 17\n # at line 1:104: T__24\n t__24!\n\n when 18\n # at line 1:110: T__25\n t__25!\n\n when 19\n # at line 1:116: T__26\n t__26!\n\n when 20\n # at line 1:122: T__27\n t__27!\n\n when 21\n # at line 1:128: T__28\n t__28!\n\n when 22\n # at line 1:134: T__29\n t__29!\n\n when 23\n # at line 1:140: T__30\n t__30!\n\n when 24\n # at line 1:146: T__31\n t__31!\n\n when 25\n # at line 1:152: T__32\n t__32!\n\n when 26\n # at line 1:158: T__33\n t__33!\n\n when 27\n # at line 1:164: T__34\n t__34!\n\n when 28\n # at line 1:170: T__35\n t__35!\n\n when 29\n # at line 1:176: T__36\n t__36!\n\n when 30\n # at line 1:182: T__37\n t__37!\n\n when 31\n # at line 1:188: T__38\n t__38!\n\n when 32\n # at line 1:194: T__39\n t__39!\n\n when 33\n # at line 1:200: T__40\n t__40!\n\n when 34\n # at line 1:206: T__41\n t__41!\n\n when 35\n # at line 1:212: T__42\n t__42!\n\n when 36\n # at line 1:218: T__43\n t__43!\n\n when 37\n # at line 1:224: DIGIT\n digit!\n\n when 38\n # at line 1:230: WS\n ws!\n\n when 39\n # at line 1:233: EOL\n eol!\n\n when 40\n # at line 1:237: LETTER\n letter!\n\n end\n end",
"def token!\n # at line 1:8: ( T__11 | T__12 | T__13 | T__14 | NUM | VAR | WS )\n alt_4 = 7\n alt_4 = @dfa4.predict( @input )\n case alt_4\n when 1\n # at line 1:10: T__11\n t__11!\n\n when 2\n # at line 1:16: T__12\n t__12!\n\n when 3\n # at line 1:22: T__13\n t__13!\n\n when 4\n # at line 1:28: T__14\n t__14!\n\n when 5\n # at line 1:34: NUM\n num!\n\n when 6\n # at line 1:38: VAR\n var!\n\n when 7\n # at line 1:42: WS\n ws!\n\n end\n end",
"def token_spec\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n return_value = TokenSpecReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n lit = nil\n __TOKEN_REF16__ = nil\n char_literal17 = nil\n char_literal18 = nil\n\n tree_for_lit = nil\n tree_for_TOKEN_REF16 = nil\n tree_for_char_literal17 = nil\n tree_for_char_literal18 = nil\n stream_STRING_LITERAL = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token STRING_LITERAL\" )\n stream_T__71 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__71\" )\n stream_CHAR_LITERAL = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token CHAR_LITERAL\" )\n stream_LABEL_ASSIGN = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token LABEL_ASSIGN\" )\n stream_TOKEN_REF = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token TOKEN_REF\" )\n\n begin\n # at line 105:4: TOKEN_REF ( '=' (lit= STRING_LITERAL | lit= CHAR_LITERAL ) -> ^( '=' TOKEN_REF $lit) | -> TOKEN_REF ) ';'\n __TOKEN_REF16__ = match( TOKEN_REF, TOKENS_FOLLOWING_TOKEN_REF_IN_token_spec_487 )\n if @state.backtracking == 0\n stream_TOKEN_REF.add( __TOKEN_REF16__ )\n end\n # at line 106:3: ( '=' (lit= STRING_LITERAL | lit= CHAR_LITERAL ) -> ^( '=' TOKEN_REF $lit) | -> TOKEN_REF )\n alt_10 = 2\n look_10_0 = @input.peek( 1 )\n\n if ( look_10_0 == LABEL_ASSIGN )\n alt_10 = 1\n elsif ( look_10_0 == T__71 )\n alt_10 = 2\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n raise NoViableAlternative( \"\", 10, 0 )\n end\n case alt_10\n when 1\n # at line 106:5: '=' (lit= STRING_LITERAL | lit= CHAR_LITERAL )\n char_literal17 = match( LABEL_ASSIGN, TOKENS_FOLLOWING_LABEL_ASSIGN_IN_token_spec_493 )\n if @state.backtracking == 0\n stream_LABEL_ASSIGN.add( char_literal17 )\n end\n # at line 106:9: (lit= STRING_LITERAL | lit= CHAR_LITERAL )\n alt_9 = 2\n look_9_0 = @input.peek( 1 )\n\n if ( look_9_0 == STRING_LITERAL )\n alt_9 = 1\n elsif ( look_9_0 == CHAR_LITERAL )\n alt_9 = 2\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n raise NoViableAlternative( \"\", 9, 0 )\n end\n case alt_9\n when 1\n # at line 106:10: lit= STRING_LITERAL\n lit = match( STRING_LITERAL, TOKENS_FOLLOWING_STRING_LITERAL_IN_token_spec_498 )\n if @state.backtracking == 0\n stream_STRING_LITERAL.add( lit )\n end\n\n when 2\n # at line 106:29: lit= CHAR_LITERAL\n lit = match( CHAR_LITERAL, TOKENS_FOLLOWING_CHAR_LITERAL_IN_token_spec_502 )\n if @state.backtracking == 0\n stream_CHAR_LITERAL.add( lit )\n end\n\n end\n # AST Rewrite\n # elements: TOKEN_REF, lit, LABEL_ASSIGN\n # token labels: lit\n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_lit = token_stream( \"token lit\", lit )\n stream_return_value = return_value ? 
subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 106:47: -> ^( '=' TOKEN_REF $lit)\n # at line 106:50: ^( '=' TOKEN_REF $lit)\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( stream_LABEL_ASSIGN.next_node, root_1 )\n\n @adaptor.add_child( root_1, stream_TOKEN_REF.next_node )\n @adaptor.add_child( root_1, stream_lit.next_node )\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n\n end\n when 2\n # at line 107:16: \n # AST Rewrite\n # elements: TOKEN_REF\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 107:16: -> TOKEN_REF\n @adaptor.add_child( root_0, stream_TOKEN_REF.next_node )\n\n\n\n return_value.tree = root_0\n\n end\n end\n char_literal18 = match( T__71, TOKENS_FOLLOWING_T__71_IN_token_spec_541 )\n if @state.backtracking == 0\n stream_T__71.add( char_literal18 )\n end\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end\n \n return return_value\n end",
"def next\n File.open(\"experiments/test.txt\", 'r') do |f|\n f.seek(@position, IO::SEEK_SET)\n while char = f.getc\n\n if char == \"\\n\"\n @row += 1\n @column = 0\n else\n @column += 1\n end\n\n case @state\n when 0 # Starting state\n\n @token = Lexigen::Token.new\n @token.column = @column\n @token.row = @row\n\n case char\n when /\\d/ # When digit\n @token.word << char\n @state = 1\n when /\\s/ # When space\n @state = 0\n when /\\w/ # When letter\n @token.word << char\n @state = 3\n else # Everything else\n @position = f.pos - 1\n return \"error\"\n end\n when 1 # Integer\n case char\n when /\\d/ # When digit\n @token.word << char\n @state = 1\n when /\\./ # When decimal point\n @token.word << char\n @state = 2\n else # Return integer\n @state = 0\n @token.type = :int\n @position = f.pos - 1\n return @token\n end\n when 2 # Float\n case char\n when /\\d/ # When digit\n @token.word << char\n @state = 2\n else # Return float\n @state = 0\n @token.type = :float\n @position = f.pos - 1\n return @token\n end\n when 3 # Identification\n case char\n when /\\w|\\d|_/ # When letter, digit or underscore\n @token.word << char\n @state = 3\n else # Return identification\n @state = 0\n @token.type = :id\n @position = f.pos - 1\n return @token\n end\n end\n end\n\n end\n end",
"def next_token\n return @extra_tokens.pop unless @extra_tokens.empty?\n\n skip_whitespace\n c = @sql[@pos, 1]\n return next_string(c) if quote?(c)\n\n first_is_identifier_char = identifier_char?(c)\n t = c\n @pos += 1\n while @pos < @length\n c = @sql[@pos, 1]\n break if c == ' '\n\n this_is_identifier_char = identifier_char?(c)\n break if first_is_identifier_char != this_is_identifier_char && @length > 0\n break if !this_is_identifier_char && quote?(c)\n\n t << c\n @pos += 1\n end\n\n case t\n when ''\n nil\n when /^\\d+$/\n t.to_i\n else\n t\n end\n end",
"def get_token\n\t\tt = Token.new\n\t\tcase @src[@lineno][@linepos]\n\t\t\twhen ' ' then\n\t\t\t\tskip_whitespace\n\t\t\twhen '\\f' then #less likely to see this\n\t\t\t\tskip_whitespace\n\t\t\twhen '\\t' then\n\t\t\t\tskip_whitespace\n\t\t\twhen '\\v' then\n\t\t\t\tskip_whitespace\n\t\t\twhen '0'..'9' then\n\t\t\t\tt = parse_number\n\t\t\twhen 'A-Z' then\n\t\t\t\tt = parse_name\n\t\t\twhen 'a-z' then\n\t\t\t\tparse_name\n\t\t\twhen '_' then\n\t\t\t\tt = parse_name\n\t\t\twhen /[~!$%\\^&*()-+=|{}\\[\\]\\:;\\/?<>,.]/ then #very much check\n\t\t\t\tt = parse_operator\n\t\t\twhen '\"' then\n\t\t\t\tt = parse_string\n\t\tend\n\tend",
"def advance\n @lookahead = next_token()\n end",
"def next_token\n result = peek_token\n @start = @finish\n return result if @start >= @expr.length\n\n if @expr[@start].numeric?\n @finish = @start + 1\n while @finish < @expr.length && @expr[@finish].to_s.numeric?\n @finish = @finish + 1\n end\n else\n @finish = @start + 1\n end\n result\n end",
"def next_token; @stack.shift; end",
"def token!\n # at line 1:8: ( PROGRAM | PRINT | VAR | CTE | IF | ELSE | AND | OR | GT | GTE | LT | LTE | INTEGER | FLOAT | STRING | BOOLEAN | VOID | FUNCTION | FIN | RETURN | INPUT | WHILE | CLASS | WORD | NUM_I | NUM_F | BOOL | ID | LPAREN | RPAREN | LKEY | RKEY | EQUAL | DIF | SIMILAR | COMMA | DDOT | SEMI | PLUS | SUB | DIV | MUL | INHERITS | DOT | WHITESPACE )\n alt_10 = 45\n alt_10 = @dfa10.predict( @input )\n case alt_10\n when 1\n # at line 1:10: PROGRAM\n program!\n\n\n when 2\n # at line 1:18: PRINT\n print!\n\n\n when 3\n # at line 1:24: VAR\n var!\n\n\n when 4\n # at line 1:28: CTE\n cte!\n\n\n when 5\n # at line 1:32: IF\n if!\n\n\n when 6\n # at line 1:35: ELSE\n else!\n\n\n when 7\n # at line 1:40: AND\n and!\n\n\n when 8\n # at line 1:44: OR\n or!\n\n\n when 9\n # at line 1:47: GT\n gt!\n\n\n when 10\n # at line 1:50: GTE\n gte!\n\n\n when 11\n # at line 1:54: LT\n lt!\n\n\n when 12\n # at line 1:57: LTE\n lte!\n\n\n when 13\n # at line 1:61: INTEGER\n integer!\n\n\n when 14\n # at line 1:69: FLOAT\n float!\n\n\n when 15\n # at line 1:75: STRING\n string!\n\n\n when 16\n # at line 1:82: BOOLEAN\n boolean!\n\n\n when 17\n # at line 1:90: VOID\n void!\n\n\n when 18\n # at line 1:95: FUNCTION\n function!\n\n\n when 19\n # at line 1:104: FIN\n fin!\n\n\n when 20\n # at line 1:108: RETURN\n return!\n\n\n when 21\n # at line 1:115: INPUT\n input!\n\n\n when 22\n # at line 1:121: WHILE\n while!\n\n\n when 23\n # at line 1:127: CLASS\n class!\n\n\n when 24\n # at line 1:133: WORD\n word!\n\n\n when 25\n # at line 1:138: NUM_I\n num_i!\n\n\n when 26\n # at line 1:144: NUM_F\n num_f!\n\n\n when 27\n # at line 1:150: BOOL\n bool!\n\n\n when 28\n # at line 1:155: ID\n id!\n\n\n when 29\n # at line 1:158: LPAREN\n lparen!\n\n\n when 30\n # at line 1:165: RPAREN\n rparen!\n\n\n when 31\n # at line 1:172: LKEY\n lkey!\n\n\n when 32\n # at line 1:177: RKEY\n rkey!\n\n\n when 33\n # at line 1:182: EQUAL\n equal!\n\n\n when 34\n # at line 1:188: DIF\n dif!\n\n\n when 35\n # at line 1:192: SIMILAR\n similar!\n\n\n when 36\n # at line 1:200: COMMA\n comma!\n\n\n when 37\n # at line 1:206: DDOT\n ddot!\n\n\n when 38\n # at line 1:211: SEMI\n semi!\n\n\n when 39\n # at line 1:216: PLUS\n plus!\n\n\n when 40\n # at line 1:221: SUB\n sub!\n\n\n when 41\n # at line 1:225: DIV\n div!\n\n\n when 42\n # at line 1:229: MUL\n mul!\n\n\n when 43\n # at line 1:233: INHERITS\n inherits!\n\n\n when 44\n # at line 1:242: DOT\n dot!\n\n\n when 45\n # at line 1:246: WHITESPACE\n whitespace!\n\n\n end\n end",
"def next\n\t\tif @next_token\n\t\t\ttoken = @next_token\n\t\t\t@next_token = nil\n\t\t\treturn token\n\t\telse\n\t\t\ttoken = read_token\n\t\t\treturn token\n\t\tend\n\tend",
"def next\n if @next.is_a? TokenSource\n @next = @next.next\n return @next \n end\n @next\n end",
"def tokenize\n return consume_string if @state == :string\n\n trim_stream\n\n # Check if we're dealing with a keyword!\n return create_token(:keyword, consume_pattern(KEYWORD_DEF)) unless @stream.match(KEYWORD_DEF).nil?\n\n # Now we must check to see what else we could be finding. Remember whatever we\n # encounter here is the *start* of whatever token it is; a \" character here means\n # the start of a string..\n if @stream[0].match STRING_START_DEF\n @state = :string\n @partial_string['delimiter'] = @stream[0]\n consume\n\n return nil\n end\n\n return create_token(:terminator, consume) if @stream[0] == ';'\n return create_token(:operator, consume) if @stream[0] == '+'\n\n return create_token(:name, consume_pattern(NAME_DEF)) unless @stream.match(NAME_DEF).nil?\n\n raise_tokenizer_error \"Illegal character '#{@stream[0]}' - unable to form a token with this character!\"\n end",
"def token!\n # at line 1:8: ( STRING | SHELL_STRING | CMD_OUTPUT | SPACE | COMMAND_END | VARIABLE | GLOB | CHUNK | OPEN_PAR | CLOSE_PAR | PIPELINE_OPERATOR | REDIRECT | COMMENT )\n alt_25 = 13\n alt_25 = @dfa25.predict( @input )\n case alt_25\n when 1\n # at line 1:10: STRING\n string!\n\n when 2\n # at line 1:17: SHELL_STRING\n shell_string!\n\n when 3\n # at line 1:30: CMD_OUTPUT\n cmd_output!\n\n when 4\n # at line 1:41: SPACE\n space!\n\n when 5\n # at line 1:47: COMMAND_END\n command_end!\n\n when 6\n # at line 1:59: VARIABLE\n variable!\n\n when 7\n # at line 1:68: GLOB\n glob!\n\n when 8\n # at line 1:73: CHUNK\n chunk!\n\n when 9\n # at line 1:79: OPEN_PAR\n open_par!\n\n when 10\n # at line 1:88: CLOSE_PAR\n close_par!\n\n when 11\n # at line 1:98: PIPELINE_OPERATOR\n pipeline_operator!\n\n when 12\n # at line 1:116: REDIRECT\n redirect!\n\n when 13\n # at line 1:125: COMMENT\n comment!\n\n end\n end",
"def parse(input)\n # Save for error msgs\n @input = input.clone\n @tokens = lex(input)\n @rpn = parse_expr\n\n assert_eos\n\n self\n end",
"def tokenize input\n s = StringScanner.new input\n\n @line = 0\n @line_pos = 0\n\n until s.eos? do\n pos = s.pos\n\n @tokens << case\n when s.scan(/\\r?\\n/) then\n token = [:NEWLINE, s.matched, *token_pos(pos)]\n @line_pos = s.pos\n @line += 1\n token\n when s.scan(/ +/) then\n [:INDENT, s.matched_size, *token_pos(pos)]\n when s.scan(/(=+)\\s*/) then\n level = s[1].length\n level = 6 if level > 6\n @tokens << [:HEADER, level, *token_pos(pos)]\n\n pos = s.pos\n s.scan(/.*/)\n [:TEXT, s.matched, *token_pos(pos)]\n when s.scan(/^(-{3,}) *$/) then\n [:RULE, s[1].length - 2, *token_pos(pos)]\n when s.scan(/([*-])\\s+/) then\n @tokens << [:BULLET, :BULLET, *token_pos(pos)]\n [:SPACE, s.matched_size, *token_pos(pos)]\n when s.scan(/([a-z]|\\d+)\\.[ \\t]+\\S/i) then\n list_label = s[1]\n width = s.matched_size - 1\n\n s.pos -= 1 # unget \\S\n\n list_type = case list_label\n when /[a-z]/ then :LALPHA\n when /[A-Z]/ then :UALPHA\n when /\\d/ then :NUMBER\n else\n raise ParseError, \"BUG token #{list_label}\"\n end\n\n @tokens << [list_type, list_label, *token_pos(pos)]\n [:SPACE, width, *token_pos(pos)]\n when s.scan(/\\[(.*?)\\]( +|$)/) then\n @tokens << [:LABEL, s[1], *token_pos(pos)]\n [:SPACE, s.matched_size, *token_pos(pos)]\n when s.scan(/(.*?)::( +|$)/) then\n @tokens << [:NOTE, s[1], *token_pos(pos)]\n [:SPACE, s.matched_size, *token_pos(pos)]\n else s.scan(/.*/)\n [:TEXT, s.matched, *token_pos(pos)]\n end\n end\n\n self\n end",
"def next_token\n\t\tif (token = @tokens.shift) != nil\n\t\t\t@copy << token\n\t\t\treturn token.get_token\n\t\telse\n\t\t\treturn nil\n\t\tend\n\tend",
"def id\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 46 )\n return_value = IdReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __TOKEN_REF198__ = nil\n __RULE_REF199__ = nil\n\n tree_for_TOKEN_REF198 = nil\n tree_for_RULE_REF199 = nil\n stream_RULE_REF = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token RULE_REF\" )\n stream_TOKEN_REF = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token TOKEN_REF\" )\n\n begin\n # at line 453:4: ( TOKEN_REF -> ID[$TOKEN_REF] | RULE_REF -> ID[$RULE_REF] )\n alt_85 = 2\n look_85_0 = @input.peek( 1 )\n\n if ( look_85_0 == TOKEN_REF )\n alt_85 = 1\n elsif ( look_85_0 == RULE_REF )\n alt_85 = 2\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n raise NoViableAlternative( \"\", 85, 0 )\n end\n case alt_85\n when 1\n # at line 454:4: TOKEN_REF\n __TOKEN_REF198__ = match( TOKEN_REF, TOKENS_FOLLOWING_TOKEN_REF_IN_id_3139 )\n if @state.backtracking == 0\n stream_TOKEN_REF.add( __TOKEN_REF198__ )\n end\n # AST Rewrite\n # elements: \n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 454:14: -> ID[$TOKEN_REF]\n @adaptor.add_child( root_0, @adaptor.create( ID, __TOKEN_REF198__ ) )\n\n\n\n return_value.tree = root_0\n\n end\n when 2\n # at line 455:4: RULE_REF\n __RULE_REF199__ = match( RULE_REF, TOKENS_FOLLOWING_RULE_REF_IN_id_3149 )\n if @state.backtracking == 0\n stream_RULE_REF.add( __RULE_REF199__ )\n end\n # AST Rewrite\n # elements: \n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 455:14: -> ID[$RULE_REF]\n @adaptor.add_child( root_0, @adaptor.create( ID, __RULE_REF199__ ) )\n\n\n\n return_value.tree = root_0\n\n end\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 46 )\n\n end\n \n return return_value\n end",
"def term\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 21 )\n return_value = TermReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n begin\n # at line 111:10: ( LOALPHA | reserved_words ) ( LOALPHA | DIGIT | DASH | UNDERSCORE | DOT | reserved_words )*\n # at line 111:10: ( LOALPHA | reserved_words )\n alt_32 = 2\n look_32_0 = @input.peek( 1 )\n\n if ( look_32_0 == LOALPHA )\n alt_32 = 1\n elsif ( look_32_0 == SCHEME || look_32_0.between?( CLASS, ACTIONS ) || look_32_0.between?( SELF, CATEGORY ) || look_32_0.between?( KIND, ACTION ) || look_32_0.between?( LINK, TERM ) )\n alt_32 = 2\n else\n raise NoViableAlternative( \"\", 32, 0 )\n end\n case alt_32\n when 1\n # at line 111:12: LOALPHA\n match( LOALPHA, TOKENS_FOLLOWING_LOALPHA_IN_term_923 )\n\n when 2\n # at line 111:22: reserved_words\n @state.following.push( TOKENS_FOLLOWING_reserved_words_IN_term_927 )\n reserved_words\n @state.following.pop\n\n end\n # at line 111:39: ( LOALPHA | DIGIT | DASH | UNDERSCORE | DOT | reserved_words )*\n while true # decision 33\n alt_33 = 7\n case look_33 = @input.peek( 1 )\n when LOALPHA then alt_33 = 1\n when DIGIT then alt_33 = 2\n when DASH then alt_33 = 3\n when UNDERSCORE then alt_33 = 4\n when DOT then alt_33 = 5\n when SCHEME, CLASS, TITLE, REL, LOCATION, ATTRIBUTES, ACTIONS, SELF, CATEGORY, KIND, MIXIN, ACTION, LINK, TERM then alt_33 = 6\n end\n case alt_33\n when 1\n # at line 111:41: LOALPHA\n match( LOALPHA, TOKENS_FOLLOWING_LOALPHA_IN_term_933 )\n\n when 2\n # at line 111:51: DIGIT\n match( DIGIT, TOKENS_FOLLOWING_DIGIT_IN_term_937 )\n\n when 3\n # at line 111:59: DASH\n match( DASH, TOKENS_FOLLOWING_DASH_IN_term_941 )\n\n when 4\n # at line 111:66: UNDERSCORE\n match( UNDERSCORE, TOKENS_FOLLOWING_UNDERSCORE_IN_term_945 )\n\n when 5\n # at line 111:79: DOT\n match( DOT, TOKENS_FOLLOWING_DOT_IN_term_949 )\n\n when 6\n # at line 111:85: reserved_words\n @state.following.push( TOKENS_FOLLOWING_reserved_words_IN_term_953 )\n reserved_words\n @state.following.pop\n\n else\n break # out of loop for decision 33\n end\n end # loop for decision 33\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 21 )\n\n end\n \n return return_value\n end",
"def text\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 54 )\n return_value = TextReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n text_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n # at line 344:23: (~ EOL )+\n # at file 344:23: (~ EOL )+\n match_count_38 = 0\n while true\n alt_38 = 2\n alt_38 = @dfa38.predict( @input )\n case alt_38\n when 1\n # at line 344:24: ~ EOL\n if @input.peek(1) == WS || @input.peek( 1 ).between?( DIGIT, T__59 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n else\n match_count_38 > 0 and break\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n eee = EarlyExit(38)\n\n\n raise eee\n end\n match_count_38 += 1\n end\n\n # syntactic predicate action gate test\n if @state.backtracking == 0\n # --> action\n return_value.value = @input.to_s( return_value.start, @input.look( -1 ) )\n # <-- action\n end\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 54 )\n memoize( __method__, text_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end",
"def tokenize\n \n end",
"def token!\n # at line 1:8: ( T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | T__30 | T__31 | T__32 | T__33 | T__34 | T__35 | T__36 | T__37 | T__38 | T__39 | T__40 | T__41 | T__42 | T__43 | T__44 | T__45 | T__46 | T__47 | T__48 | T__49 | T__50 | T__51 | T__52 | T__53 | T__54 | T__55 | T__56 | T__57 | T__58 | T__59 | ST | ND | RD | TH | DIGIT | WS | EOL | LETTER )\n alt_7 = 56\n alt_7 = @dfa7.predict( @input )\n case alt_7\n when 1\n # at line 1:10: T__12\n t__12!\n\n when 2\n # at line 1:16: T__13\n t__13!\n\n when 3\n # at line 1:22: T__14\n t__14!\n\n when 4\n # at line 1:28: T__15\n t__15!\n\n when 5\n # at line 1:34: T__16\n t__16!\n\n when 6\n # at line 1:40: T__17\n t__17!\n\n when 7\n # at line 1:46: T__18\n t__18!\n\n when 8\n # at line 1:52: T__19\n t__19!\n\n when 9\n # at line 1:58: T__20\n t__20!\n\n when 10\n # at line 1:64: T__21\n t__21!\n\n when 11\n # at line 1:70: T__22\n t__22!\n\n when 12\n # at line 1:76: T__23\n t__23!\n\n when 13\n # at line 1:82: T__24\n t__24!\n\n when 14\n # at line 1:88: T__25\n t__25!\n\n when 15\n # at line 1:94: T__26\n t__26!\n\n when 16\n # at line 1:100: T__27\n t__27!\n\n when 17\n # at line 1:106: T__28\n t__28!\n\n when 18\n # at line 1:112: T__29\n t__29!\n\n when 19\n # at line 1:118: T__30\n t__30!\n\n when 20\n # at line 1:124: T__31\n t__31!\n\n when 21\n # at line 1:130: T__32\n t__32!\n\n when 22\n # at line 1:136: T__33\n t__33!\n\n when 23\n # at line 1:142: T__34\n t__34!\n\n when 24\n # at line 1:148: T__35\n t__35!\n\n when 25\n # at line 1:154: T__36\n t__36!\n\n when 26\n # at line 1:160: T__37\n t__37!\n\n when 27\n # at line 1:166: T__38\n t__38!\n\n when 28\n # at line 1:172: T__39\n t__39!\n\n when 29\n # at line 1:178: T__40\n t__40!\n\n when 30\n # at line 1:184: T__41\n t__41!\n\n when 31\n # at line 1:190: T__42\n t__42!\n\n when 32\n # at line 1:196: T__43\n t__43!\n\n when 33\n # at line 1:202: T__44\n t__44!\n\n when 34\n # at line 1:208: T__45\n t__45!\n\n when 35\n # at line 1:214: T__46\n t__46!\n\n when 36\n # at line 1:220: T__47\n t__47!\n\n when 37\n # at line 1:226: T__48\n t__48!\n\n when 38\n # at line 1:232: T__49\n t__49!\n\n when 39\n # at line 1:238: T__50\n t__50!\n\n when 40\n # at line 1:244: T__51\n t__51!\n\n when 41\n # at line 1:250: T__52\n t__52!\n\n when 42\n # at line 1:256: T__53\n t__53!\n\n when 43\n # at line 1:262: T__54\n t__54!\n\n when 44\n # at line 1:268: T__55\n t__55!\n\n when 45\n # at line 1:274: T__56\n t__56!\n\n when 46\n # at line 1:280: T__57\n t__57!\n\n when 47\n # at line 1:286: T__58\n t__58!\n\n when 48\n # at line 1:292: T__59\n t__59!\n\n when 49\n # at line 1:298: ST\n st!\n\n when 50\n # at line 1:301: ND\n nd!\n\n when 51\n # at line 1:304: RD\n rd!\n\n when 52\n # at line 1:307: TH\n th!\n\n when 53\n # at line 1:310: DIGIT\n digit!\n\n when 54\n # at line 1:316: WS\n ws!\n\n when 55\n # at line 1:319: EOL\n eol!\n\n when 56\n # at line 1:323: LETTER\n letter!\n\n end\n end",
"def parse_factor\r\n case @cur_token.kind\r\n when :SEARCHSTRING\r\n accept_it\r\n when :LPAREN\r\n accept_it\r\n parse_expression\r\n accept(:RPAREN)\r\n else\r\n error(\"\")\r\n end\r\n end",
"def id\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 46)\n return_value = IdReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __TOKEN_REF198__ = nil\n __RULE_REF199__ = nil\n\n tree_for_TOKEN_REF198 = nil\n tree_for_RULE_REF199 = nil\n stream_RULE_REF = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token RULE_REF\")\n stream_TOKEN_REF = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token TOKEN_REF\")\n\n begin\n # at line 462:4: ( TOKEN_REF -> ID[$TOKEN_REF] | RULE_REF -> ID[$RULE_REF] )\n alt_85 = 2\n look_85_0 = @input.peek(1)\n\n if (look_85_0 == TOKEN_REF) \n alt_85 = 1\n elsif (look_85_0 == RULE_REF) \n alt_85 = 2\n else\n @state.backtracking > 0 and raise(ANTLR3::Error::BacktrackingFailed)\n nvae = NoViableAlternative(\"\", 85, 0)\n raise nvae\n end\n case alt_85\n when 1\n # at line 463:4: TOKEN_REF\n __TOKEN_REF198__ = match(TOKEN_REF, TOKENS_FOLLOWING_TOKEN_REF_IN_id_3144) \n if @state.backtracking == 0\n stream_TOKEN_REF.add(__TOKEN_REF198__)\n end\n # AST Rewrite\n # elements: \n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 463:14: -> ID[$TOKEN_REF]\n @adaptor.add_child(root_0, @adaptor.create!(ID, __TOKEN_REF198__))\n\n\n\n return_value.tree = root_0\n\n end\n when 2\n # at line 464:4: RULE_REF\n __RULE_REF199__ = match(RULE_REF, TOKENS_FOLLOWING_RULE_REF_IN_id_3154) \n if @state.backtracking == 0\n stream_RULE_REF.add(__RULE_REF199__)\n end\n # AST Rewrite\n # elements: \n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 464:14: -> ID[$RULE_REF]\n @adaptor.add_child(root_0, @adaptor.create!(ID, __RULE_REF199__))\n\n\n\n return_value.tree = root_0\n\n end\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing(root_0)\n @adaptor.set_token_boundaries(return_value.tree, return_value.start, return_value.stop)\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node!(@input, return_value.start, @input.look(-1), re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 46)\n\n end\n \n return return_value\n end",
"def parse_single_token cur_tok\n\n\t\tcase cur_tok.type\n\t\twhen \"Digit\"\n\t\t\tif cur_tok.value.include? \".\"\n\t\t\t\treturn Term.new(cur_tok.line, cur_tok.col, magnitude: cur_tok.value.to_f)\n\t\t\telse\n\t\t\t\treturn Term.new(cur_tok.line, cur_tok.col, magnitude: cur_tok.value.to_i)\n\t\t\tend\n\t\twhen \"Identifier\"\n\t\t\treturn Reference.new(cur_tok.line, cur_tok.col, cur_tok.value)\n\t\twhen \"Keyword\"\n\t\t\tif cur_tok.value == \"true\" or cur_tok.value == \"false\"\n\t\t\t\treturn Boolean.new(cur_tok.line, cur_tok.col, cur_tok.value == \"true\")\n\t\t\telse\n\t\t\t\tthrow_error(\"Misplaced keyword.\", cur_tok)\n\t\t\tend\n\t\twhen \"Operator\"\n\t\t\treturn Operator.new(cur_tok.line, cur_tok.col, cur_tok.value)\n\t\twhen \"Punctuation\"\n\t\t\tthrow_error(\"Misplaced Punctuation.\", cur_tok)\n\t\twhen \"String\"\n\t\t\tthrow_error(\"Strings are not implemented in this parser.\", cur_tok)\n\t\twhen \"EOF\"\n\t\t\tthrow_error(\"EOF occured when parsing single token (the lexer & parser aren't talking to each other properly).\", cur_tok)\n\t\telse\n\t\t\tthrow_error(\"parse_single_token failed to identify the type of the token (the lexer & parser aren't talking to each other properly).\", cur_tok)\n\t\tend\n\tend",
"def with\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 34 )\n with_start_index = @input.index\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return \n end\n # at line 313:7: ( 'W' | 'w' ) ( 'I' | 'i' ) ( 'T' | 't' ) ( 'H' | 'h' )\n if @input.peek( 1 ).between?( T__54, T__55 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__24, T__25 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__16, T__17 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n if @input.peek( 1 ).between?( T__36, T__37 )\n @input.consume\n @state.error_recovery = false\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n mse = MismatchedSet( nil )\n raise mse\n end\n\n\n\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 34 )\n memoize( __method__, with_start_index, success ) if @state.backtracking > 0\n\n end\n \n return \n end",
"def next_line(input=[])\r\n @tokens = input\r\n @column = -1\r\n @line += 1\r\n end"
] | [
"0.71777105",
"0.7152235",
"0.711723",
"0.7098178",
"0.70267206",
"0.7022751",
"0.7022216",
"0.7018345",
"0.70166904",
"0.69872034",
"0.69616836",
"0.69474554",
"0.69171876",
"0.68387187",
"0.67689776",
"0.67452294",
"0.66701007",
"0.66021335",
"0.6599689",
"0.6596234",
"0.65913254",
"0.65913254",
"0.65913254",
"0.65913254",
"0.65879214",
"0.6587774",
"0.6582514",
"0.65547365",
"0.6551242",
"0.65436256",
"0.65397847",
"0.65394497",
"0.65265113",
"0.6499191",
"0.6492205",
"0.6492043",
"0.648723",
"0.64818543",
"0.6454355",
"0.6429808",
"0.6429713",
"0.642518",
"0.6409758",
"0.63993955",
"0.63972956",
"0.63972384",
"0.6366609",
"0.6362324",
"0.6352032",
"0.6342327",
"0.634184",
"0.6336916",
"0.63278294",
"0.6297959",
"0.62938505",
"0.6271491",
"0.62559783",
"0.6245974",
"0.62377644",
"0.62311757",
"0.62192446",
"0.62091124",
"0.62091124",
"0.62091124",
"0.62091124",
"0.6196791",
"0.6184623",
"0.6181711",
"0.617618",
"0.6169614",
"0.61689514",
"0.61669",
"0.61636686",
"0.61583287",
"0.6157673",
"0.6156645",
"0.6144518",
"0.61431175",
"0.6141652",
"0.6141643",
"0.6137715",
"0.6135391",
"0.6120106",
"0.6118995",
"0.6113268",
"0.61044073",
"0.60986745",
"0.60955834",
"0.60878575",
"0.60750073",
"0.6071881",
"0.6070196",
"0.60545915",
"0.6050812",
"0.6047329",
"0.6034043",
"0.60128355",
"0.6002642",
"0.60011154",
"0.60006744",
"0.59992707"
] | 0.0 | -1 |
GET /orders GET /orders.json | def index
@orders = Order.user_orders current_user
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getorders(args={})\n {\n :method=> \"GetOrders\"\n }.to_json\n end",
"def orders\n params = { command: 'account_orders' }\n get('/json.php', params)\n end",
"def index\n @orders = Order.all\n render json: @orders\n end",
"def index\n @orders = Order.all\n render json: @orders, status: 200\n end",
"def orders\n authenticated_post(\"orders\").body\n end",
"def orders\n authenticated_post(\"auth/r/orders\").body\n end",
"def index\n @orders = Order.order(\"id\").all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n respond_to do |format|\n format.html\n format.json { render :json => @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def get_all_orders() \n\tputs \"Getting all orders\"\n\tresponse = request_get(\"/api/order\")\n\tputs response.body\nend",
"def index\n #@orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def get_orders\n return ShopifyAPI::Order.all\n end",
"def index\n\t\t@orders = Order.all\n\t\trespond_with @orders\n\tend",
"def listOrders()\n if(!authenticateAdmin(params[:admin_id], params[:admin_auth_key]))\n render json: {status: false, reason: \"Authentication Failed\", data: \"\"}\n return\n end\n ret = []\n Order.find_each do |order|\n ret << {id: order.id, product_id: order.product_id, product_name: Product.find(order.product_id).name, user_id: order.user_id, quantity: order.quantity, price_per_unit: order.price_per_unit, int_status: order.order_status}\n end\n render json: {data: ret.to_json, reason: '', status: true}\n end",
"def index\n @orders = Order.all\n if @orders.count >= 1\n json_response(@orders)\n else\n json_response({ Message: Message.no_data }, :not_found)\n end\n end",
"def recent_orders()\n\t\twith_auth nil do |options|\n\t\t\tself.class.get(\"/api/v1/orders\", options)\n \tend\n\tend",
"def index\n @orders = Order.all\n render json: { status: 'SUCCESS', message: 'Loaded posts', data: @orders }\n end",
"def show\n render json: @order\n end",
"def show\n render json: @order\n end",
"def index\n @orders = Order.paginate page: params[:page], order: 'created_at desc', \n per_page: 10\n \n respond_to do |format|\n format.html\n format.json { render json: @orders } \n end\n end",
"def index\n @power_orders = PowerOrder.all\n render json: @power_orders\n end",
"def show\n order = Order.find(params[:id])\n render json: order\n end",
"def index\n @order_line_items = @order.order_line_items\n\n render json: @order_line_items\n end",
"def index\n @orders = Order.order(\"id DESC\").page(params[:page])\n\n if params[:user_id]\n @orders = @orders.where(:user_id => params[:user_id])\n end\n\n if Rails.configuration.orders_status.select{|k, v| v[:real]}.keys.include? params[:status]\n @orders = @orders.where(:status => params[:status])\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @admin_orders = Order.page(params[:page]).per(10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_orders }\n end\n end",
"def orders(params = {})\n get('/market/orders', params)\n .map { |data| build_persisted_order data }\n end",
"def show\n @order = Order.find(params[:id])\n\n render json: @order\n end",
"def index\n #data = HTTParty.get(\"http://localhost:8081/customers.json\")\n #p data.parsed_response[0]['email']\n if params[:customerId].present?\n @orders = Order.where(\"customerId\": params[:customerId].to_i)\n render json: @orders, status: 200\n elsif params[:id].present?\n @orders = Order.find_by id: params[:id]\n render json: @orders, status:200\n elsif params[:email].present?\n res = HTTParty.get(\"http://localhost:8081/customers/?email=#{params['email']}\")\n p res\n res = res.parsed_response\n @orders = Order.where(\"customerId\": res['id'].to_i)\n render json: @orders, status:200\n else\n @orders = Order.all\n end\n end",
"def all_orders(options)\n request :account, :get, 'allOrders', options\n end",
"def index\n @orders = @group.orders\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index # see model > order.rb\n # if current_user\n @orders = current_user.orders\n render \"index.json.jb\"\n # else\n # render json: [], status: :unauthorized\n # end\n end",
"def get_orders\n orders\n end",
"def index\n @orders = order.all\n end",
"def show\n @order = Order.find(params[:id])\n\n render json: @order, status: :ok\n\n end",
"def get_order(order_id)\n\tputs \"Getting order: \" + order_id\n\tresponse = request_get(\"/api/order/\" + order_id)\n\tputs response.body\nend",
"def index\n @orders = @orders.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = @branch.orders.limit(100).paginate(:page=>params[:page], :per_page=>20)\n logger.debug @orders.inspect\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = index_resources\n respond_with(@orders)\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show_orders\n @user = User.find_by_sql [\"select * from users where email = ? and users.status = 'admin'\", params[:email]]\n if @user.count != 0\n @orders = UsersOrder.find_by_sql [\"select users_orders.id, users.email, books.title, books.genre from users_orders\n LEFT JOIN users ON users_orders.user_id = users.id\n LEFT JOIN books ON users_orders.book_id = books.id where users_orders.status = ? ORDER BY users_orders.created_at ASC\", params[:status]]\n end\n render json: @orders\n end",
"def index\n @line_items = @order.line_items\n\n render json: @line_items\n end",
"def index\n @orders = Order.all\n end",
"def orders\n resp = API.connection.get 'api/orders', {\n user_email: @email,\n user_token: @token\n }\n\n case resp.status\n when 200\n resp.body.map { |order_hash| Order.new(order_hash) }\n when 401\n raise Teachable::AuthError, resp.body['error']\n else\n raise Teachable::Error, 'Unknown response.'\n end\n end",
"def customer_single_orders\n @orders = Order.where(customer_id: current_user.customer_id, category: :single)\n render json: @orders, status: 200\n\n end",
"def order(id, orderid = '')\n get(\"/accounts/#{id}/orders\")['orders']\n end",
"def index\n @orders = Order.all\n respond_to do |format|\n format.html { render layout: \"account\" }\n format.json { render :json => @orders }\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end",
"def index\n @orders = Order.all\n end",
"def orders(id, orderid = nil)\n get(\"/accounts/#{id}/orders#{orderid ? \"/#{orderid}\" : ''}\")['orders']\n end",
"def show\n \n @order = Order.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n \n end \n end",
"def index \n @orders = Order.all \n end",
"def searchByOrder\n \torderId = params['id']\n\t order = Order.where(id: orderId)\n\t render json: order, status: 200\n\tend",
"def index # class method\n @orders = Order.all\n end",
"def index # class method\n @orders = Order.all\n end"
] | [
"0.83267236",
"0.815406",
"0.80041754",
"0.7947603",
"0.7846487",
"0.7823147",
"0.78028905",
"0.76992875",
"0.76980406",
"0.7682792",
"0.7682792",
"0.7682792",
"0.7682792",
"0.76827645",
"0.75880665",
"0.7574349",
"0.756171",
"0.7531827",
"0.74860185",
"0.7432208",
"0.7369724",
"0.73326814",
"0.73287463",
"0.73287463",
"0.7286546",
"0.72852486",
"0.7270603",
"0.7269312",
"0.7264207",
"0.72581947",
"0.72464883",
"0.72452116",
"0.7238898",
"0.7233591",
"0.7217645",
"0.72125256",
"0.7156275",
"0.7141881",
"0.7086836",
"0.70671874",
"0.70659137",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70640796",
"0.70595765",
"0.70301193",
"0.70267373",
"0.70247364",
"0.70047027",
"0.69968164",
"0.6988875",
"0.6972612",
"0.69694895",
"0.6966311",
"0.696538",
"0.696538",
"0.6957052",
"0.69567907",
"0.6946388",
"0.6917995",
"0.69107455",
"0.69074893",
"0.69074893"
] | 0.0 | -1 |
GET /orders/1 GET /orders/1.json | def show
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getorders(args={})\n {\n :method=> \"GetOrders\"\n }.to_json\n end",
"def index\n @orders = Order.all\n render json: @orders\n end",
"def index\n @orders = Order.all\n render json: @orders, status: 200\n end",
"def index\n @orders = Order.order(\"id\").all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n if @orders.count >= 1\n json_response(@orders)\n else\n json_response({ Message: Message.no_data }, :not_found)\n end\n end",
"def get_order(order_id)\n\tputs \"Getting order: \" + order_id\n\tresponse = request_get(\"/api/order/\" + order_id)\n\tputs response.body\nend",
"def show\n order = Order.find(params[:id])\n render json: order\n end",
"def show\n @order = Order.find(params[:id])\n\n render json: @order\n end",
"def index\n\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def orders\n params = { command: 'account_orders' }\n get('/json.php', params)\n end",
"def index\n @orders = Order.all\n respond_to do |format|\n format.html\n format.json { render :json => @orders }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n render json: @order, status: :ok\n\n end",
"def show\n @v1_order = V1::Order.find(params[:id])\n\n if @v1_order.nil?\n render json: @v1_order, message: 'Resource not found', status: 404\n else\n render json: @v1_order, message: 'OK', status: 200\n end\n end",
"def index\n #@orders = Order.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def customer_single_orders\n @orders = Order.where(customer_id: current_user.customer_id, category: :single)\n render json: @orders, status: 200\n\n end",
"def show\n render json: @order\n end",
"def show\n render json: @order\n end",
"def get_all_orders() \n\tputs \"Getting all orders\"\n\tresponse = request_get(\"/api/order\")\n\tputs response.body\nend",
"def show\n \n @order = Order.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n \n end \n end",
"def index\n #data = HTTParty.get(\"http://localhost:8081/customers.json\")\n #p data.parsed_response[0]['email']\n if params[:customerId].present?\n @orders = Order.where(\"customerId\": params[:customerId].to_i)\n render json: @orders, status: 200\n elsif params[:id].present?\n @orders = Order.find_by id: params[:id]\n render json: @orders, status:200\n elsif params[:email].present?\n res = HTTParty.get(\"http://localhost:8081/customers/?email=#{params['email']}\")\n p res\n res = res.parsed_response\n @orders = Order.where(\"customerId\": res['id'].to_i)\n render json: @orders, status:200\n else\n @orders = Order.all\n end\n end",
"def index\n\t\t@orders = Order.all\n\t\trespond_with @orders\n\tend",
"def show\n @order1 = Order1.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order1 }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def index\n @order_line_items = @order.order_line_items\n\n render json: @order_line_items\n end",
"def orders\n authenticated_post(\"orders\").body\n end",
"def index\n @orders = Order.order(\"id DESC\").page(params[:page])\n\n if params[:user_id]\n @orders = @orders.where(:user_id => params[:user_id])\n end\n\n if Rails.configuration.orders_status.select{|k, v| v[:real]}.keys.include? params[:status]\n @orders = @orders.where(:status => params[:status])\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.json { render :json => @order }\n end\n end",
"def orders\n authenticated_post(\"auth/r/orders\").body\n end",
"def show\n @order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json=>@order }\n end\n end",
"def show\n @order = Order.find(params[:id])\n @title = \"Order #{@order.id}\"\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def recent_orders()\n\t\twith_auth nil do |options|\n\t\t\tself.class.get(\"/api/v1/orders\", options)\n \tend\n\tend",
"def show\n #@order = Order.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order }\n end\n end",
"def show\n if @order\n respond_to do |format|\n format.html { @order }\n format.json { render json: @order.to_json(include: [:status, :package, :discount]) }\n end\n else\n redirect_to orders_path, notice: \"Order ID not found for that client.\"\n end\n end",
"def index\n @orders = @group.orders\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @orders }\n end\n end",
"def index\n @power_orders = PowerOrder.all\n render json: @power_orders\n end",
"def index\n @admin_orders = Order.page(params[:page]).per(10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_orders }\n end\n end",
"def order(id, orderid = '')\n get(\"/accounts/#{id}/orders\")['orders']\n end",
"def retrieve_order(order_id:)\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::GET,\n '/v2/orders/{order_id}',\n 'default')\n .template_param(new_parameter(order_id, key: 'order_id')\n .should_encode(true))\n .header_param(new_parameter('application/json', key: 'accept'))\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end",
"def index # see model > order.rb\n # if current_user\n @orders = current_user.orders\n render \"index.json.jb\"\n # else\n # render json: [], status: :unauthorized\n # end\n end",
"def index\n @orders = Order.paginate page: params[:page], order: 'created_at desc', \n per_page: 10\n \n respond_to do |format|\n format.html\n format.json { render json: @orders } \n end\n end",
"def index\n @line_items = @order.line_items\n\n render json: @line_items\n end",
"def index\n @orders = order.all\n end",
"def searchByOrder\n \torderId = params['id']\n\t order = Order.where(id: orderId)\n\t render json: order, status: 200\n\tend",
"def get_order(order_id)\n res = request('get', \"https://#{orders_path}/#{order_id}\")\n parse_response res\n end",
"def show\n @order = Order.find(request[:order_id])\n @order_item = @order.order_items.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @order_item }\n end\n end",
"def index\n @orders = Order.all\n render json: { status: 'SUCCESS', message: 'Loaded posts', data: @orders }\n end",
"def fetch\n @order = Order.where(owner_id: current_user.branch.nodes.pluck(:id)).order('orders.quantity').last\n\n render json: {id: @order.id, quantity: @order.quantity,\n date: @order.created_at.strftime('%d/%m/%Y'),\n time: @order.created_at.strftime('%p %I:%M'),\n destination: \"#{t('fridge')} - #{@order.owner_id}\"\n }, status: :ok\n end",
"def get(order_id)\n get_request(t_url(:order, order_id))\n end",
"def get_order\n @order = Order.find(params[:id])\n end",
"def get_order\n @order = Order.find(params[:id])\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end",
"def index\n @orders = Order.all\n end"
] | [
"0.7554187",
"0.7488565",
"0.7475228",
"0.73777765",
"0.73714006",
"0.7341344",
"0.73384553",
"0.73168725",
"0.73018956",
"0.7291244",
"0.7291244",
"0.7291244",
"0.7291244",
"0.72911215",
"0.7281374",
"0.7266143",
"0.72498786",
"0.7228276",
"0.7213504",
"0.71842283",
"0.7126197",
"0.7126197",
"0.70958656",
"0.70884424",
"0.7045333",
"0.7017131",
"0.701173",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.7005317",
"0.69797677",
"0.6975385",
"0.6963179",
"0.69519156",
"0.6920044",
"0.6920044",
"0.69116527",
"0.6907297",
"0.6903982",
"0.6889989",
"0.6880894",
"0.68711925",
"0.6852713",
"0.68315697",
"0.68302864",
"0.68285227",
"0.68220717",
"0.6819676",
"0.6808189",
"0.67916673",
"0.678832",
"0.67851156",
"0.6750148",
"0.6737865",
"0.6725011",
"0.671648",
"0.6706029",
"0.67034036",
"0.67034036",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405",
"0.66981405"
] | 0.0 | -1 |
POST /parking_places/1/orders POST /parking_places/1/orders.json | def create
@order = Order.payment(current_user, @parking_place, order_params[:booked_time].to_i, order_params[:payment])
begin
respond_to do |format|
if @order.save
format.html { redirect_to @order, notice: t(:parking_place_successfully_booked) }
format.json { render :show, status: :created, location: @order }
else
format.html { render :new }
format.json { render json: @order.errors, status: :unprocessable_entity }
end
end
rescue Order::NeedPayment
respond_to do |format|
format.html { render :payment }
format.json { render json: @order.errors, status: :payment_required }
end
rescue Order::NotEnoughMoney
respond_to do |format|
format.html { render :payment }
format.json { render json: @order.errors, status: :unprocessable_entity }
end
rescue Order::ParkingPlaceNotFree
respond_to do |format|
format.html { redirect_to root_path, alert: t(:parking_place_not_free) }
format.json { render json: @order.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def submit_order()\n\tputs \"Submitting order\"\n\tdata = create_order()\n\tresponse = request_post(\"/api/order\", data)\n\tputs response.body\nend",
"def create_order(order_params)\n res = request('post', \"https://#{orders_path}\", order_params)\n parse_response res\n end",
"def orders\n authenticated_post(\"orders\").body\n end",
"def orders\n authenticated_post(\"auth/r/orders\").body\n end",
"def create\n order = Order.create(order_params)\n render json: order\nend",
"def add_orders params\n @orders_hash = JSON.parse(params)\n\n @orders_hash['orders'].each do |order|\n add_order(order[\"origin\"],order[\"destination\"],order[\"size\"])\n end\n true\n end",
"def create_order(order)\n build_persisted_order(\n post('/market/orders', order.symbolize_keys.merge(side: SIDES_MAP.fetch(order.fetch(:side))))\n )\n end",
"def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n persist_order_address\n format.html { redirect_to [:admin, @order], notice: 'Order was successfully created.' }\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = @orders.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: \"Order was successfully created.\" }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n if @order.save\n render json: { status: 'SUCCESS', data: @order }\n else\n render json: { status: 'ERROR', data: @order.errors }\n end\n end",
"def create_order(order)\n # response = post(\n # 'peatio/market/orders',\n # {\n # market: order.market.downcase,\n # side: order.side.to_s,\n # volume: order.amount,\n # price: order.price\n # }\n # )\n\n return if order.amount < 1e-8\n od = build_order(order)\n return if od.nil?\n\n# Arke::Log.debug \"Skip order creation #{od.to_json}\\n#{order.inspect}\"\n Ordering.new(od).submit\n @open_orders.add_order(order, od.id) if od.id\n Arke::Log.debug \"Order created #{od.to_json}\"\n\n # @open_orders.add_order(order, response.env.body['id']) if response.env.status == 201 && response.env.body['id']\n\n # response\n end",
"def create_order(options)\n request :account, :post, 'order', options\n end",
"def create\n @order = Order.new( order_params )\n @order.system_id = Order.set_system_id\n respond_to do |format|\n if @order.save\n set_order_values(@order,params)\n route = 'https://private-3643a-orderlordapi.apiary-mock.com/api/v1/jobs'\n respose = HTTParty.post(route, body: @body, :headers => @headers)\n parsed_response = respose.parsed_response.symbolize_keys!\n if \tparsed_response[:success] == \"true\"\n @order.tracker_hash = parsed_response[:tracker_hash]\n @order.save!\n end\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n items = params[:items]\n filtered_items = []\n items.each do |item|\n item[:quantity].times do |order|\n filtered_items << {\"apiKey\" => item[:item_api], \"customizationChoices\" => [], \"comments\" => item[:instruction]}\n end\n end\n token = ENV[\"REACT_APP_EAT_STREET_TOKEN\"]\n uri = URI.parse(\"https://api.eatstreet.com/publicapi/v1/send-order\")\n request = Net::HTTP::Post.new(uri)\n request.content_type = \"application/json\"\n request[\"X-Access-Token\"] = token\n request.body = JSON.dump({\n \"restaurantApiKey\" => processing_params[:restaurant_api_key],\n \"items\" => filtered_items,\n \"method\" => \"delivery\",\n \"payment\" => \"cash\",\n \"test\" => false,\n \"comments\" => processing_params[:comments],\n \"card\" => {\n \"apiKey\" => nil\n },\n \"address\" => {\n \"apiKey\" => nil,\n \"streetAddress\" => processing_params[:address],\n \"latitude\" => processing_params[:latitude],\n \"longitude\" => processing_params[:longitude]\n },\n \"recipient\" => {\n \"apiKey\" => nil,\n \"firstName\" => processing_params[:username],\n \"lastName\" => processing_params[:username],\n \"phone\" => processing_params[:phone],\n 'email' => processing_params[:email]\n }\n })\n\n req_options = {\n use_ssl: uri.scheme == \"https\",\n }\n\n response = Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|\n http.request(request)\n end\n\n message = ''\n if response.code == 200\n message = {error: false, details: 'You Order Has Been Processed'}\n items.each do |item|\n order = Order.find(item[:id])\n order.ordered = true\n end\n\n else\n message = response.body\n end\n\n render json: message\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, :notice=>\"Order was successfully created.\" }\n format.json { render :json=>@order, :status=>:created, :location=>@order }\n else\n format.html { render :action=>\"new\" }\n format.json { render :json=>@order.errors, :status=>:unprocessable_entry }\n end\n end\n end",
"def perform\n Magento2::Api.configure('dz4xnhhgfsfuyj00g6bkel0jq6mwdak2', 'hhjnlf59qh2m7an9sdpfcu0o9nox78y6', 'ie5iafduhqs1dydynidsjki582oti17w', 'mva5hldj17elic6muxmf53fq7zmm7xl5', \"https://mall2door.net\")\n orders = Magento2::Api.get(\"/rest/en/V1/orders\", {searchCriteria: 'all' })\n all_orders = orders[:items]\n all_orders.each do |order|\n unless order[:status].present?\n order_id = order[:increment_id]\n id = order[:entity_id]\n status = order[:state]\n params = {\n entity_id: id,\n increment_id: order_id,\n status: status,\n }\n if status\n Magento2::Api.put(\"/rest/en/V1/orders/create\", {entity: params})\n end\n end\n end\n end",
"def orders\n params = { command: 'account_orders' }\n get('/json.php', params)\n end",
"def cow_order\n @order = Order.new\n @order.lines.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @order }\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: t('app.orders.create.success') }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def phone_order(params)\n path = @version + '/Phone/Order/'\n method = 'POST'\n return request(path, method, params)\n end",
"def create \n order_params = (place_order_params)\n order_params[:customer_id] = current_user.customer_id\n @order = Order.new(order_params)\n if @order.recurring?\n if((Time.now + 1.hour).strftime('%H:%M:%S') <= (Time.parse(@order.place_date + ' ' + @order.timeslot.start)).strftime('%H:%M:%S') && !@order.completed? )\n # Add the auto generated entry\n @order.category = 'single'\n @order.created_by = 'auto'\n @order.save!\n end\n # Improve this\n if Order.create!(order_params)\n render json: @order, status: 201\n else\n render json: {'errors': ['Order can no be placed']}, status: :unprocessable_entity\n end\n else\n if (Time.now + 1.hour <= Time.parse(@order.place_date + ' ' + @order.timeslot.start)) && @order.save!\n render json: @order, status: 201\n else\n render json: {'errorrs': ['Order can not be placed']}, status: :unprocessable_entity\n end\n end \n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, :notice => 'Order was successfully created.' }\n format.json { render :json => @order, :status => :created, :location => @order }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @order.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @order = order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def order_post(order, api_key, opts = {})\n data, _status_code, _headers = order_post_with_http_info(order, api_key, opts)\n return data\n end",
"def create\n @order = current_owner.orders.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to orders_path, notice: 'Order was created successfully' }\n format.json { render :show, status: :created, location: orders_path }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Provider was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # @order = Order.new() \n total = 0\n \n @order = Order.new()\n for product in params[:_json]\n \n if (product[:quantity].nil? || product[:quantity].to_f < 1 || !isint(product[:quantity]))\n # Handle case when order invalid quantity\n render json: \"\", status: :bad_request\n return\n end\n\n @product = Product.find_by_name_and_size_id(product[:product], product[:size]) \n if @product.nil?\n # Handle case when order invalid products\n render json: \"\", status: :not_found\n return\n end \n total = total + @product.price * product[:quantity].to_f \n @order.orders_products << OrdersProduct.new(:product => @product, :hot => product[:hot], :quantity => product[:quantity]) \n end \n\n @order.total = total\n\n if @order.save\n render json: @order, status: :created, location: @order\n else\n render json: @order.errors, status: :unprocessable_entity\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n @v1_order = V1::Order.new(v1_order_params)\n\n if @v1_order.save\n render json: @v1_order, status: :OK, location: @v1_order\n else\n render json: @v1_order.errors, status: :unprocessable_entity\n end\n end",
"def create\n @order = Order.new(params[:order])\n listing = Listing.find(params[:listing_id])\n event = Event.find(params[:event_id])\n addr = Address.find_or_create_by_street_address( params[:address][:street_address])\n\n addr.update_attributes(params[:address])\n\n order_attributes = {\n shipment_method: params[:shipment_method],\n payment_date: Time.now(),\n payment_method: 0, #TODO\n shipment_date: Time.now(),\n status: 0, #TODO\n shipment_fee: 0,\n service_fee: 0,\n total_amount: params[:tickets_amount].to_i * listing.list_price,\n tickets_amount: params[:tickets_amount].to_i\n }\n\n @order.update_attributes(order_attributes)\n\n @order.seller = listing.seller\n @order.buyer = current_user\n @order.shipment_address = addr\n\n # get tickets\n tickets = listing.get_some_available_tickets params[:tickets_amount].to_i\n\n listing.tickets.each do |t|\n @order.items.build({\n selling_price: 0,\n ticket_id: t.id\n })\n end\n\n respond_to do |format|\n if @order.save\n #format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.html { redirect_to success_order_url(@order) }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def write_order(order, request_body = nil)\n path = \"/checkout/orders\"\n path += \"/#{order.id}\" if order.id\n\n request_body ||= order.to_json\n response = https_connection.post do |req|\n req.url path\n\n req.headers['Authorization'] = \"Klarna #{sign_payload(request_body)}\"\n req.headers['Accept'] = 'application/vnd.klarna.checkout.aggregated-order-v2+json',\n req.headers['Content-Type'] = 'application/vnd.klarna.checkout.aggregated-order-v2+json'\n req.headers['Accept-Encoding'] = ''\n\n req.body = request_body\n end\n handle_status_code(response.status, response.body)\n response\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, flash: { sucess: 'Order was successfully created.' } }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def post(resource, params)\n case resource\n when \"pedidos\", \"place_order\", \"new_order\" then url = \"/pedidos\"\n when \"envios\", \"shipping\" then url = \"/envios\"\n else url = \"/#{resource}\"\n end\n\n post_request(url, params)\n end",
"def new_order(params)\n camelcase_params!(params)\n call_api('NewOrder', params)\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_test_order(options)\n request :account, :post, 'order/test', options\n end",
"def make_api_call_for_order_creation(url, api_params, access_token)\n RestClient.post(url, api_params, Authorization: \"Bearer #{access_token}\", content_type: :json)\n end",
"def create\n @power_order = PowerOrder.new(power_order_params)\n @power_order.save\n render json: @power_order\n end",
"def create\n @order = Order.new(params[:order])\n @addresses = Address.where(:user_id => current_user.id)\n @order.status = 0;\n @order.delivery_cost = @order.delivery.cost\n @order.user = current_user\n @order_positions = current_user.basket.order_positions\n @order.order_positions = @order_positions\n @order.value = sum_price(@order_positions)\n\n respond_to do |format|\n if @order.save\n @order_positions.each do |op|\n op.container = @order\n op.save\n p = op.product\n p.availability -= op.amount\n p.save\n end\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def collect_order(shop_id, order)\n request(:post, \"shops/#{shop_id}/orders\", body: order).tap do |response|\n raise InvalidResponse, response.body unless response.status == 201\n end\n end",
"def order_params\n params.require(:order).permit(:address, :latitude, :longitude)\n end",
"def insert_order_with_http_info(order, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: OrderApi.insert_order ...'\n end\n # verify the required parameter 'order' is set\n if @api_client.config.client_side_validation && order.nil?\n fail ArgumentError, \"Missing the required parameter 'order' when calling OrderApi.insert_order\"\n end\n # resource path\n local_var_path = '/order/orders'\n\n # query parameters\n query_params = {}\n query_params[:'_expand'] = opts[:'_expand'] if !opts[:'_expand'].nil?\n\n # header parameters\n header_params = {}\n header_params['X-UltraCart-Api-Version'] = @api_client.select_header_api_version()\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json; charset=UTF-8'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(order)\n auth_names = ['ultraCartOauth', 'ultraCartSimpleApiKey']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'OrderResponse')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: OrderApi#insert_order\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create\n \tredirect_to orders_url\n# @order = Order.new(params[:order])\n\n# respond_to do |format|\n# if @order.save\n# format.html { redirect_to @order, notice: 'Order was successfully created.' }\n# format.json { render json: @order, status: :created, location: @order }\n# else\n# format.html { render action: \"new\" }\n# format.json { render json: @order.errors, status: :unprocessable_entity }\n# end\n# end\n end",
"def create\n @spree_order = Spree::Order.new(spree_order_params)\n\n respond_to do |format|\n if @spree_order.save\n format.html { redirect_to @spree_order, notice: t(\"activerecord.models.spree/order\") + t(\"messages.successfully_created\") }\n format.json { render :show, status: :created, location: @spree_order }\n else\n format.html { render :new }\n format.json { render json: @spree_order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def submit_orders_to_suppliers(customer, orders)\n raise ArgumentError unless customer.is_a? Customer and orders.is_a? Array\n\n # Submit each wine order to the revelant supplier\n orders.each do |order|\n RestClient.post \"#{order[:wine].supplier.url}orders\", {\n name: customer.name,\n address: customer.address,\n email: customer.email,\n upc: order[:wine].upc,\n quantity: order[:quantity]\n }.to_json, content_type: :json do |response|\n case response.code\n when 200, 201\n session[:basket].clear\n else\n #TODO: log an error\n puts 'error oh nooooo'\n end\n end\n end\n end",
"def create\n @order_way = OrderWay.new(order_way_params)\n\n respond_to do |format|\n if @order_way.save\n format.html { redirect_to @order_way, notice: 'Order way was successfully created.' }\n format.json { render :show, status: :created, location: @order_way }\n else\n format.html { render :new }\n format.json { render json: @order_way.errors, status: :unprocessable_entity }\n end\n end\n end",
"def orders(params = {})\n get('/market/orders', params)\n .map { |data| build_persisted_order data }\n end",
"def validate_order() \n\tputs \"Validating order\" \n\tdata = create_order()\n\tresponse = request_post(\"/api/order/validate\", data)\n\tputs response.body\nend",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to home_path, notice: 'Order was successfully created.' }\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(farm_order_params)\n respond_to do |format|\n if @farm_order.save\n format.html { redirect_to @farm_order, notice: 'Farm order was successfully created.' }\n format.json { render action: 'show', status: :created, location: @farm_order }\n else\n format.html { render action: 'new' }\n format.json { render json: @farm_order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_prov\n @order = @branch.orders.create(params[:order])\n respond_to do |format|\n if @order.save\n format.html { redirect_to branch_mov_prod_path(@branch,@order), notice: 'Order was successfully created.' }\n format.json { render json: @order }\n else\n format.html { render \"provision\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n @order_sales = @order.sales\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def order_params\n params.require(:order).permit(:user_id, :place_id, :status, :placed_at, :pickup_at, :delivered_at, :rating, :street, :city, :state, :postal_code, :phone, :driver_id)\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n # brb WET code.\n @order[:queue_total] = @order.get_queue_total \n @order[:queue_place] = @order.get_queue_place(@order)\n @order[:wait_time] = StoreConfig.find('avg_wait_time').value\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n respond_to do |format|\n if @order.save\n @order.order_create\n format.html { redirect_to @order, notice: 'Замовлення успішно створено.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n\n @basket = ActiveSupport::JSON.decode(cookies[\"basket\"])\n p @basket\n @basket.each do |order_position|\n @order_detail = OrderDetail.create!(order: @order, product_id: order_position[0], qty: order_position[1])\n end\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def order_params\n params.require(:order).permit(:address, :town, :county, :phone)\n end",
"def create\n @order, @errors = Order.create_from_cart(@cart, order_params)\n\n if @order\n render json: @order, status: :created, location: @order\n else\n render json: @errors, status: :unprocessable_entity\n end\n end",
"def create\n order = Order.create(order_params)\n if order.save\n render json: order\n else\n render new\n end\n end",
"def new_order_v1\n begin\n @parsed_json[\"order_items\"] ? order_items = @parsed_json[\"order_items\"] : nil\n @parsed_json[\"location_id\"] ? location_id = @parsed_json[\"location_id\"].to_i : nil\n location = Location.find_by_id(location_id)\n tax = location.tax #look up tax of location manually\n if location.nil? || location_id.nil? || order_items.nil? || tax.nil?\n return render :status => 404, :json => {:status => :failed, :error => \"Resource not found\"}\n end\n ActiveRecord::Base.transaction do\n if Order.is_current(@user.id, location)\n current_order = Order.find_by_user_id_and_location_id_and_is_paid_and_is_cancel(@user.id, location.id, 0, 0)\n ordered_date = current_order.created_at\n if current_order.order_items.length > 0\n ordered_date = Time.now.utc\n end\n\n order_item = OrderItem.add_new_item_v1(order_items, current_order)\n\n # re-calculate sub_price of order\n sub_price = Order.calculate_sub_price(current_order)\n fee = location.service_fee_type=='fixed'? location.fee : sub_price / 100 * location.fee\n\n if !order_item.prize_id.nil? && order_item.is_prize_item == 1\n current_order.update_attributes!(\n :location_id => location.id,\n :tax => tax,\n :fee => fee,\n :created_at => ordered_date,\n :status => 1,\n :sub_price => sub_price\n )\n else\n current_order.update_attributes!(\n :location_id => location.id,\n :tax => tax,\n :fee => fee,\n :created_at => ordered_date,\n :sub_price => sub_price\n )\n end\n\n if current_order.in_order?\n current_order.update_attributes!(:status => 2)\n end\n\n total_price = Order.calculate_total_price(current_order)\n current_order.update_attribute('total_price', total_price)\n\n return render :status => 200,\n :json => {\n :status => :success,\n :order_id => current_order.id,\n :order_item_id => order_item.id,\n :order_item_status => order_item.status\n }\n else # this is a brand-new Order\n today_orders = Order.where('created_at > ?', Time.now.strftime(\"%Y-%m-%d\")+\" 00:00:00\")\n\n new_order = Order.new\n new_order.user_id = @user.id\n new_order.tax = tax\n new_order.location_id = location.id\n new_order.timezone = location.timezone\n new_order.receipt_day_id = Order.next_receipt_no(location)\n new_order.ticket = today_orders.count+1\n order_item = OrderItem.add_new_item_v1(order_items, new_order)\n # if user added a prize item to cart, status of cart would be changed to ordered\n if !order_item.prize_id.nil? && order_item.is_prize_item == 1\n new_order.status = 1\n end\n sub_price = Order.calculate_sub_price(new_order)\n new_order.sub_price = sub_price\n new_order.total_tax = sub_price * tax\n new_order.total_tip = -2\n new_order.tip_percent = 0.16\n new_order.fee = location.service_fee_type=='fixed'? location.fee : sub_price / 100 * location.fee\n new_order.save!\n\n total_price = Order.calculate_total_price(new_order)\n new_order.update_attribute('total_price', total_price)\n # Automatically check the user in at this location (to allow item grading)\n @user.checkin_at(location, false)\n\n return render :status => 200,\n :json => {\n :status => :success,\n :order_id => new_order.id,\n :order_item_id => order_item.id,\n :order_item_status => order_item.status\n }\n end\n end\n rescue\n return render :status => 500, :json => {:status => :failed, :error => \"Internal Service Error\"}\n end\n end",
"def create\n @order = Order.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to orders_url, notice: 'Dati commessa caricati.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n @order.submitted_by_id = current_user.id\n respond_to do |format|\n if @order.save\n format.html { redirect_to orders_url, notice: 'Order was successfully created.' }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def order_params\n params.require(:order).permit(:address, :town_or_city, :state_or_county, :post_or_zip_code, :country)\n end",
"def create\n @order = Order.new(params[:order])\n end",
"def prepare_store\n order = SwaggerClient::Order.new(\"id\" => 10002,\n\t\t \"petId\" => 10002,\n\t\t \"quantity\" => 789,\n\t\t \"shipDate\" => \"2015-04-06T23:42:01.678Z\",\n\t\t \"status\" => \"placed\",\n\t\t \"complete\" => false)\n SwaggerClient::StoreApi.place_order(:body => order)\nend",
"def search_orders(body:)\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::POST,\n '/v2/orders/search',\n 'default')\n .header_param(new_parameter('application/json', key: 'Content-Type'))\n .body_param(new_parameter(body))\n .header_param(new_parameter('application/json', key: 'accept'))\n .body_serializer(proc do |param| param.to_json unless param.nil? end)\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end",
"def order_params\n params.require(:order).permit(:address, :price, :pickup_day, :return_day, :pickup_time, :return_time, :instructions, :dry_cleaning, :wash, :user_id, :provider_id, :status)\n end",
"def create_order(location_id:,\n body:)\n # Prepare query url.\n _query_builder = config.get_base_uri\n _query_builder << '/v2/locations/{location_id}/orders'\n _query_builder = APIHelper.append_url_with_template_parameters(\n _query_builder,\n 'location_id' => location_id\n )\n _query_url = APIHelper.clean_url _query_builder\n\n # Prepare headers.\n _headers = {\n 'accept' => 'application/json',\n 'content-type' => 'application/json; charset=utf-8'\n }\n\n # Prepare and execute HttpRequest.\n _request = config.http_client.post(\n _query_url,\n headers: _headers,\n parameters: body.to_json\n )\n OAuth2.apply(config, _request)\n _response = execute_request(_request)\n\n # Return appropriate response type.\n decoded = APIHelper.json_deserialize(_response.raw_body)\n _errors = APIHelper.map_response(decoded, ['errors'])\n ApiResponse.new(_response, data: decoded, errors: _errors)\n end",
"def create\n @admin_order = Order.new(params[:admin_order])\n\n respond_to do |format|\n if @admin_order.save\n format.html { redirect_to @admin_order, notice: 'Order was successfully created.' }\n format.json { render json: @admin_order, status: :created, location: @admin_order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @admin_order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n \n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, :print => 'true' }\n format.json { render action: 'show', status: :created, location: @order }\n else\n format.html { render action: 'new' }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render json: { order_id: @order.id }, status: :ok }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = current_user.orders.new(order_params)\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(order_params)\n @order.status = \"Pending\"\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n place = Place.new(params[:place])\n\n if place.save\n render json: place, status: :created, location: place\n else\n render json: place.errors, status: :unprocessable_entity\n end\n end",
"def save_order_data \n @order.customer_id = @customer.id \n @order.external_code = @parsed['id']\n @order.store_id = @parsed['store_id']\n @order.sub_total = @parsed['total_amount']\n @order.delivery_fee = @parsed['total_shipping']\n @order.total_shipping = @parsed['total_shipping']\n @order.total = @parsed['total_amount_with_shipping']\n @order.country = @parsed['shipping']['receiver_address']['country']['id']\n @order.state = @parsed['shipping']['receiver_address']['state']['name']\n @order.city = @parsed['shipping']['receiver_address']['city']['name']\n @order.district = @parsed['shipping']['receiver_address']['neighborhood']['name']\n @order.street = @parsed['shipping']['receiver_address']['street_name']\n @order.complement = @parsed['shipping']['receiver_address']['comment']\n @order.latitude = @parsed['shipping']['receiver_address']['latitude']\n @order.longitude = @parsed['shipping']['receiver_address']['longitude']\n @order.dt_order_create = @parsed['date_created']\n @order.postal_code = @parsed['shipping']['receiver_address']['zip_code']\n @order.number = @parsed['shipping']['receiver_address']['street_number']\n @order.save\n end",
"def order_params\n params.require(:order).permit(:system_id, :response, :tracker_hash)\n end",
"def create\n respond_to do |format|\n if order.save\n format.html { redirect_to order, notice: 'Order was successfully created.' }\n format.json { render json: order, status: ':created', location: order }\n else\n format.html { render action: 'new' }\n format.json { render json: order.errors, status: ':unprocessable_entity' }\n end\n end\n end",
"def create\n # 前端傳回的資料,呼叫 order_params 過濾前端傳回來的資料,用 new 創出一個 Order 的物件 (此時還是 ruby 物件喔)\n @order = Order.new(order_params)\n\n respond_to do |format|\n # save 是指把該筆 物件裡的資料存入資料庫內\n if @order.save\n # 若儲存成功,就導回該筆資料的 show 頁面\n format.html { redirect_to order_path(@order), notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n else\n # 若儲存失敗,就導回新增的頁面重新填寫資料\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def repair_order_create\n\n @business_id = current_business_user.id\n @client_id = current_client.id\n @repair_order = current_vehicle.repair_orders.new(\n repair_order_number: params[:repair_order_number],\n client_id: @client_id,\n business_user_id: @business_id)\n if @repair_order.save\n render json: { repair_order: @repair_order.as_json(include: :client),\n vehicle: @repair_order.as_json(include: :vehicle) },\n status: :created\n else\n render json: { errors: @repair_order.errors.full_messages },\n status: :unprocessable_entity\n end\n end",
"def add_production_order_with_http_info(production_orders, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: ProductionOrderApi.add_production_order ...'\n end\n # verify the required parameter 'production_orders' is set\n if @api_client.config.client_side_validation && production_orders.nil?\n fail ArgumentError, \"Missing the required parameter 'production_orders' when calling ProductionOrderApi.add_production_order\"\n end\n # resource path\n local_var_path = '/production_orders'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(production_orders)\n auth_names = ['api_key']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'ProductionOrder')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: ProductionOrderApi#add_production_order\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def order_params\n params.require(:order).permit(:status, :location_id, :shopping_date)\n end",
"def create\n res = HTTParty.get(\"http://localhost:8081/customers/?email=#{order_params['email'].to_s}\")\n codeCustomer = res.code\n dataCustomer = res.parsed_response\n p res\n res = HTTParty.get(\"http://localhost:8082/items/#{order_params['itemid'].to_s}.json\")\n #res = HTTParty.get(\"http://localhost:8082/items/#{order_params['itemid'].to_s}.json\")\n codeItem = res.code\n dataItem = res.parsed_response\n p dataItem\n if codeCustomer != 404 && codeItem != 404\n newParams = order_params\n newParams[\"award\"] = dataCustomer[\"award\"] \n newParams[\"price\"] = dataItem[\"price\"]\n newParams[\"total\"] = dataItem[\"price\"] - dataCustomer[\"award\"]\n p newParams\n \n #HTTParty.put(\"http://localhost:8081/customers/order?award=#{newParams['award']}&total=#{newParams[\"total\"]}&customerId=#{newParams['customerid']}\")\n end\n if codeCustomer == 404 || codeItem == 404\n if codeCustomer == 404 and codeItem == 404\n render json: {error: \"Customer and Item do not exist\"}, status: 400\n return\n end\n if codeCustomer == 404 and codeItem != 404\n render json: {error: \"Customer does not exist\"}, status: 400\n return\n end\n if codeCustomer != 404 and codeItem == 404\n render json: {error: \"Item does not exist\"}, status: 400\n return\n end\n else\n @order = Order.new\n @order.customerid = dataCustomer[\"id\"]\n @order.email = dataCustomer[\"email\"]\n @order.itemid = order_params[:itemid]\n @order.description = dataItem[\"description\"]\n @order.award = newParams[\"award\"]\n @order.total = newParams[\"total\"]\n @order.price = newParams[\"price\"]\n \n orderResult = HTTParty.put('http://localhost:8081/customers/order', \n :body => @order.to_json,\n :headers => {'Content-Type' => 'application/json', 'ACCEPT' => 'application/json'}\n )\n\n res = HTTParty.put(\"http://localhost:8082/items/#{order_params[:itemid]}?stockQty=#{dataItem['stockQty']-1}&description=#{dataItem['description']}&price=#{dataItem['price']}&id=#{order_params[:id]}\")\n p res\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: 'Order was successfully created.' }\n format.json { render :show, status: :created, location: @order }\n \n else\n format.html { render :new }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n @order.user_id = current_user.user_id\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to order_url(@order), notice: I18n.t('orders.successfully_created') }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @order = Order.new(params[:order])\n\n respond_to do |format|\n if @order.save\n format.html { redirect_to @order, notice: \"Don't think you're going to be there when we deliver? Leave cash in an envelope outside your door. Otherwise, we'll see you in person!\" }\n format.json { render json: @order, status: :created, location: @order }\n else\n format.html { render action: \"new\" }\n format.json { render json: @order.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.692877",
"0.6879032",
"0.6749802",
"0.665593",
"0.66548663",
"0.66374075",
"0.6625363",
"0.6558686",
"0.6527811",
"0.6513646",
"0.6504733",
"0.64470786",
"0.6440704",
"0.64008266",
"0.6398549",
"0.63868403",
"0.6369669",
"0.635966",
"0.63524467",
"0.6341479",
"0.6336716",
"0.63191265",
"0.63160425",
"0.63145924",
"0.62877303",
"0.62763184",
"0.62763184",
"0.62763184",
"0.62763184",
"0.62684053",
"0.624012",
"0.624012",
"0.6233666",
"0.6225083",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.62210804",
"0.619914",
"0.6193244",
"0.6181057",
"0.6172229",
"0.61647224",
"0.61585295",
"0.61540645",
"0.61498076",
"0.61342955",
"0.61337715",
"0.61304414",
"0.6127031",
"0.61268836",
"0.6114967",
"0.61121136",
"0.6108352",
"0.6101516",
"0.6095837",
"0.6091034",
"0.60791737",
"0.6072591",
"0.6065879",
"0.60508007",
"0.6042348",
"0.6036789",
"0.6030577",
"0.6026597",
"0.6016794",
"0.6005431",
"0.60002506",
"0.59970206",
"0.59937406",
"0.5989436",
"0.5985478",
"0.59703076",
"0.59667903",
"0.5966609",
"0.59661597",
"0.5961107",
"0.59599954",
"0.59593415",
"0.59590256",
"0.59567606",
"0.59562796",
"0.5941875",
"0.5938547",
"0.5937525",
"0.5929367",
"0.59248763",
"0.59239244",
"0.59172684",
"0.5913834",
"0.5911076",
"0.59098184",
"0.59001803",
"0.58858174"
] | 0.0 | -1 |
Use callbacks to share common setup or constraints between actions. | def set_order
@order = Order.find_for_user(params[:id], current_user)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def order_params
params.require(:order).permit(:booked_time, :payment)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n end",
"def param_whitelist\n [:role, :title]\n end",
"def expected_permitted_parameter_names; end",
"def safe_params\n params.except(:host, :port, :protocol).permit!\n end",
"def strong_params\n params.require(:team_member).permit(param_whitelist)\n end",
"def permitir_parametros\n \t\tparams.permit!\n \tend",
"def strong_params\n params.require(:community).permit(param_whitelist)\n end",
"def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end",
"def strong_params\n params.require(:education).permit(param_whitelist)\n end",
"def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end",
"def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end",
"def param_whitelist\n [:rating, :review]\n end",
"def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end",
"def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end",
"def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end",
"def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end",
"def valid_params_request?; end",
"def strong_params\n params.require(:experience).permit(param_whitelist)\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end",
"def allowed_params\n params.require(:allowed).permit(:email)\n end",
"def permitted_params\n []\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end",
"def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend",
"def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end",
"def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end",
"def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end",
"def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end",
"def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end",
"def safe_params\n params.require(:user).permit(:name)\n end",
"def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend",
"def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end",
"def check_params; true; end",
"def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end",
"def quote_params\n params.permit!\n end",
"def valid_params?; end",
"def paramunold_params\n params.require(:paramunold).permit!\n end",
"def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend",
"def filtered_parameters; end",
"def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end",
"def filtering_params\n params.permit(:email, :name)\n end",
"def check_params\n true\n end",
"def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend",
"def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend",
"def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end",
"def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end",
"def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end",
"def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend",
"def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end",
"def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end",
"def active_code_params\n params[:active_code].permit\n end",
"def filtering_params\n params.permit(:email)\n end",
"def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end",
"def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end",
"def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end",
"def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end",
"def post_params\n if current_user.admin? \n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end",
"def list_params\n params.permit(:name)\n end",
"def filter_parameters; end",
"def filter_parameters; end",
"def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end",
"def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end",
"def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end",
"def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end",
"def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end",
"def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end",
"def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end",
"def url_whitelist; end",
"def admin_social_network_params\n params.require(:social_network).permit!\n end",
"def filter_params\n params.require(:filters).permit(:letters)\n end",
"def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end",
"def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end",
"def sensitive_params=(params)\n @sensitive_params = params\n end",
"def permit_request_params\n params.permit(:address)\n end",
"def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end",
"def secure_params\n params.require(:location).permit(:name)\n end",
"def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end",
"def question_params\n params.require(:survey_question).permit(question_whitelist)\n end",
"def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end",
"def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end",
"def maintenance_request_params\n params[:maintenance_request].permit! #allow all parameters for now\n end",
"def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end",
"def url_params\n params[:url].permit(:full)\n end",
"def backend_user_params\n params.permit!\n end",
"def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend",
"def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end",
"def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end",
"def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end",
"def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end",
"def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end",
"def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end",
"def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end",
"def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end"
] | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",
"0.62894756",
"0.6283177",
"0.6242471",
"0.62382483",
"0.6217549",
"0.6214457",
"0.6209053",
"0.6193042",
"0.6177802",
"0.6174604",
"0.61714715",
"0.6161512",
"0.6151757",
"0.6150663",
"0.61461",
"0.61213595",
"0.611406",
"0.6106206",
"0.6105114",
"0.6089039",
"0.6081015",
"0.6071004",
"0.60620916",
"0.6019971",
"0.601788",
"0.6011056",
"0.6010898",
"0.6005122",
"0.6005122",
"0.6001556",
"0.6001049",
"0.59943926",
"0.5992201",
"0.59909594",
"0.5990628",
"0.5980841",
"0.59669393",
"0.59589154",
"0.5958826",
"0.5957911",
"0.5957385",
"0.5953072",
"0.59526145",
"0.5943361",
"0.59386164",
"0.59375334",
"0.59375334",
"0.5933856",
"0.59292704",
"0.59254247",
"0.5924164",
"0.59167904",
"0.59088355",
"0.5907542",
"0.59064597",
"0.5906243",
"0.5898226",
"0.589687",
"0.5896091",
"0.5894501",
"0.5894289",
"0.5891739",
"0.58860534",
"0.5882406",
"0.587974",
"0.58738774",
"0.5869024",
"0.58679986",
"0.5867561",
"0.5865932",
"0.5864461",
"0.58639693",
"0.58617616",
"0.5861436",
"0.5860451",
"0.58602303",
"0.5854586",
"0.58537364",
"0.5850427",
"0.5850199"
] | 0.0 | -1 |
Returns an array of locations for a circle. | def get_circle(location, r)
startY = location.y - r
startX = 0
endX = 0
if startY % 2 == 0
startX = location.x - (r / 2.0).floor
endX = location.x + r;
else
startX = location.x - (r / 2.0).floor
endX = location.x + r;
if r % 2 == 0
startX+=1; endX+=1;
end
end
endY = location.y + r;
circle_width = 2 * r + 1;
delta_y_odd = -1;
delta_y_even = -1;
offset = 0;
circle = Array.new;
for y in Range.new(startY, endY)
length = circle_width - (y - location.y).abs;
if (y % 2 == 0)
if (delta_y_even == -1)
delta_y_even = (y - location.y).abs;
end
offset = ((y - location.y).abs - delta_y_even) / 2;
if ((location.y - r) % 2 != 0)
offset-=1;
end
else
if (delta_y_odd == -1)
delta_y_odd = (y - location.y).abs;
end
offset = (((y - location.y).abs - delta_y_odd) / 2.0).floor - 1;
end
start = startX + offset;
#trace("y = " + y + "location = " + location.y + " : " + length);
s = "";
for x in Range.new(start, start + length - 1)
s += x.to_s + " ";
circle.push(Location.new(x,y));
end
#trace(s);
end
return circle;
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def points_for_circle(x, y, r)\n cur_page.points_for_circle(x, y, r)\n end",
"def location(x, y, theta, distance)\n \t return [\n \t x + distance * Math.cos(theta),\n \t y - distance * Math.sin(theta)\n \t ]\n \tend",
"def bresenham_circle_coordinates\n coordinates = []\n\n populate_symmetric = -> (x, y) {\n coordinates << [x, y]\n coordinates << [x, -y]\n coordinates << [-x, -y]\n coordinates << [-x, y]\n coordinates << [y, x]\n coordinates << [y, -x]\n coordinates << [-y, -x]\n coordinates << [-y, x]\n }\n\n # start from the upper point of the circle\n x = 0\n y = @r\n\n delta = 3 - 2 * y\n\n while x <= y do\n populate_symmetric[x, y]\n\n if delta < 0\n delta += 4 * x + 6\n else\n delta += 4 * (x - y) + 10\n y -= 1\n end\n\n x += 1\n end\n\n # move circle to proper center position\n coordinates.uniq.map { |x, y| [x + @x, y + @y] }\n end",
"def coords\n coord_list = []\n (@x..(@x + @size_x - 1)).each do |i|\n (@y..(@y + @size_y - 1)).each do |j|\n coord = [i, j]\n coord_list << coord\n end\n end\n\n return coord_list\n end",
"def coords_from_angle(theta)\n cos_theta = Math.cos(theta)\n sin_theta = Math.sin(theta)\n radius = @a * @b / Math.sqrt(@b * @b * cos_theta * cos_theta + @a * @a * sin_theta * sin_theta)\n [radius * cos_theta, radius * sin_theta]\n end",
"def in_polar_coordinates\n return [] unless recognized_format?\n radius_data.map.with_index { |r, i| [i * 2 * Math::PI / radius_data.size, r] }\n end",
"def coordinates\n [@y_location, @x_location]\n end",
"def sector_a_intfx_coords\n [ [-1.5 * @radius, @radius], [-1.5 * @radius, -@radius], \n [-1.5 * @radius, 3 * @radius], [-3 * @radius, 2 * @radius],\n [-3 * @radius, 0], [-3 * @radius, -2 * @radius],\n [-1.5 * @radius, -3 * @radius] ]\n end",
"def surrounding(position, radius)\n dlat = 0.0001\n dlng = 0.0001\n (1..radius).map do |r|\n (-r..r).map do |rlat|\n points = [sum(position, [dlat * rlat, dlng * (r - rlat.abs)])]\n if rlat.abs != r\n points += [sum(position, [dlat * rlat, dlng * (rlat.abs - r)])]\n end\n points\n end.flatten(1)\n end\nend",
"def calculate_coordinates\n bresenham_circle_coordinates\n end",
"def coordinate_bounds(latitude, longitude, radius)\n radius = radius.to_f\n factor = (Math::cos(latitude * Math::PI / 180.0) * 69.0).abs\n [\n latitude - (radius / 69.0),\n latitude + (radius / 69.0),\n longitude - (radius / factor),\n longitude + (radius / factor)\n ]\n end",
"def findCoordinates(spiralLocation)\n\treturner=[nil,nil]\n\tring=findRing(spiralLocation)#ring that this spiral location is on\n\tputs ring\n\tringLocation=spiralLocation-getArea(ring-1)\n\tringSection=ceiling(ringLocation/2.0/ring)\n\t#ok so, what we've got out of the ring section is ...\n\t# 1. whether we are on the x (if it's odd) or on the y (if it's even)\n\t# 2. what we need to subtract (ring section-ring)\n\t#soooo ... let's find out which one is which first\n\tif(ringSection%2)\n\t\treturner[0]=ring*-1*(2-ringSection)\n\t\treturner[1]=ringLocation-ringSection*ring\n\telse\n\t\treturner[1]=ring*-1*(3-ringSection)\n\t\treturner[0]=ringLocation-ringSection*ring\n\tend\n\treturn returner\nend",
"def coordinates\n coordinates = Array.new\n \n coordinates.push self.lat\n coordinates.push self.lng\n \n return coordinates\n end",
"def coordinate_array\n\t\t[latitude,longitude]\n\tend",
"def coordinates\n [latitude, longitude]\n end",
"def circle(radius)\n pixels = []\n \n @width.times do |x|\n row = Array.new(@height, nil)\n \n @height.times do |y|\n pixel = 0\n @density.times do |i|\n xx = (x + i.to_f/@density) - @x_offset\n yy = f_halfcircle(xx, radius)\n yt = yy + @y_offset\n yb = -yy + @y_offset\n\n if (yt >= y && yt < y + 1) || (yb >= y && yb < y + 1)\n pixel = 1\n end\n end\n row[y] = pixel\n end\n pixels << row\n end\n \n PNM.create(pixels.transpose.reverse!, {:type => :pbm})\n end",
"def coords\n [x, y]\n end",
"def coord_array\n _coords.dup\n end",
"def latlng\n [lat, lon]\n end",
"def coordinates\n arr = []\n (0...@size).each do |row|\n (0...@size).each do |column|\n arr << Coordinate.new(x: row,y: column)\n end\n end\n arr\n end",
"def points_on_circle(center, normal, radius, numseg)\r\n # Get the x and y axes\r\n axes = Geom::Vector3d.new(normal).axes\r\n center = Geom::Point3d.new(center)\r\n xaxis = axes[0]\r\n yaxis = axes[1]\r\n \r\n xaxis.length = radius\r\n yaxis.length = radius\r\n\r\n # compute the points\r\n da = (Math::PI * 2) / numseg\r\n pts = []\r\n for i in 0...numseg do\r\n angle = i * da\r\n cosa = Math.cos(angle)\r\n sina = Math.sin(angle)\r\n vec = Geom::Vector3d.linear_combination(cosa, xaxis, sina, yaxis)\r\n pts.push(center + vec)\r\n end\r\n \r\n # close the circle\r\n pts.push(pts[0].clone)\r\n\r\n pts\r\nend",
"def get_objects_at_coord(x_location, y_location)\n get_world_array[Matrix.two_to_one(x_location, y_location, @x_size)]\n end",
"def north_radius(xy, cluster)\n north_arr = []\n x, y = xy[0], xy[1]\n ny = y - 1\n in_cluster = cluster.include?([x, ny])\n while in_cluster == true\n \tnorth_arr << [x, ny]\n \tny -= 1\n \tin_cluster = false unless cluster.include?([x, ny])\n end\n radius = north_arr.length\n return radius \n end",
"def coordinates\n [rand(50), rand(90)]\n end",
"def locations\n @locations ||= Array(@grpc.locations).map do |l|\n Location.from_grpc l.lat_lng\n end\n end",
"def get_arc_points(cx,cy,radius, beg_angle, end_angle, degree_inc = 1.0)\r\n# - - - - - - - - - - - - - - - - - - - -\r\n deg = beg_angle\r\n degree_inc = degree_inc.abs\r\n ares = Array.new\r\n while (deg < end_angle)\r\n #print \"(deg = \", deg, \")\\n\"\r\n cp = calc_point_from_angle(cx,cy, deg, tradius)\r\n ares.append(cp)\r\n deg += degree_inc\r\n end #while\r\nend",
"def location\n [@posX, @posY, @facing]\n end",
"def map_locs\n [location]\n end",
"def map_locs\n [location]\n end",
"def all_circles\n (circles + Circle.globals).uniq\n end",
"def locations\n\t\t[]\n\tend",
"def find_radi(xy, cluster)\n north = north_radius(xy, cluster)\n south = south_radius(xy, cluster)\n east = east_radius(xy, cluster)\n west = west_radius(xy, cluster)\n ne = ne_radius(xy, cluster)\n nw = nw_radius(xy, cluster)\n se = se_radius(xy, cluster)\n sw = sw_radius(xy, cluster)\n\n all = [north, south, east, west, ne, nw, se, sw]\n return all\n end",
"def points_for_arc(x, y, r, start_angle, end_angle)\n cur_page.points_for_arc(x, y, r, start_angle, end_angle)\n end",
"def location\n b = []\n b << latitude\n b << longitude\n Geocoder.coordinates(b)\n end",
"def sp_points_on_circle(center, normal, radius, numseg, rotAngle)\n # Get the x and y axes\n axes = Geom::Vector3d.new(normal).axes\n center = Geom::Point3d.new(center)\n xaxis = axes[0]\n yaxis = axes[1]\n xaxis.length = radius\n yaxis.length = radius\n rotAngle = 0.0 unless rotAngle.is_a?(Numeric)\n # Compute the points\n da = (Math::PI*2) / numseg\n pts = []\n for i in 0...numseg do\n angle = rotAngle + (i * da)\n cosa = Math.cos(angle)\n sina = Math.sin(angle)\n vec = Geom::Vector3d.linear_combination(cosa, xaxis, sina, yaxis)\n pts.push(center + vec)\n end\n # Close the circle\n pts.push(pts[0].clone)\n pts\n end",
"def circle_nodes_at_point(nodeset=@nodes, center=Vector[@width/2,@height/2], radius=[@width,@height].min/2, reverse=false, offset=0)\n nodeset.each_with_index{|(key, node), i| nodeset[key].location = Vector[\n center[0] + (radius * Math.sin(offset+2*Math::PI*i/nodeset.length)), \n center[1] - (radius * Math.cos(offset+2*Math::PI*i/nodeset.length))]}\n end",
"def points\n [top_left, top_right, bottom_left, bottom_right]\n end",
"def location\n [lat.to_f / 100_000, lng.to_f / 100_000]\n end",
"def locations_within_locus(latitude, longitude, radius)\n uri_params = { lat: latitude, lon: longitude, radius: radius }\n @client.get('/BikePoint', uri_params)\n end",
"def coordinates\n [@data['latitude'].to_f, @data['longitude'].to_f]\n end",
"def coordinates\n [@data['latitude'].to_f, @data['longitude'].to_f]\n end",
"def coords_of_neighbors(x, y)\n coords_of_neighbors = []\n (x - 1).upto(x + 1).each do |neighbors_x|\n (y - 1).upto(y + 1).each do |neighbors_y|\n next if (x == neighbors_x) && (y == neighbors_y)\n coords_of_neighbors << [neighbors_x, neighbors_y]\n end\n end\n coords_of_neighbors\n end",
"def start_coords\n marker_coords('S')\n end",
"def bounding_box(lat, lon, radius_meters)\n radius_meters = radius_meters.to_f\n delta_lat = radius_meters / DEG_LAT_IN_METERS\n delta_lon = radius_meters / (DEG_LAT_IN_METERS * Math.cos(lat * DEG_TO_RAD))\n [\n lat - delta_lat,\n lon - delta_lon,\n lat + delta_lat,\n lon + delta_lon\n ]\n end",
"def coordinates\n [@data[:szer_geogr].to_f, @data[:dl_geogr].to_f]\n end",
"def locations\n unless defined?(@locations)\n @locations=[]\n for loc in Location.order(\"id ASC\").includes(:bottom_right_coordinate, :top_left_coordinate)\n @locations << loc if do_overlap_with?(loc.area)\n end \n end \n @locations\n end",
"def arround_square(scope = 1)\n sl = scope * COORDINATE_PER_KM\n return [XNavi::Coordinate.new(@x - sl, @y - sl), XNavi::Coordinate.new(@x+sl, @y+sl)]\n end",
"def lat_lon\n [lat, lon]\n end",
"def locations_within_proximity\n field.locations.where( x_coordinate: x_proximity,\n y_coordinate: y_proximity )\n end",
"def npc_unit_locations\n fields = \"location_x, location_y\"\n without_locking do\n Unit.in_zone(self).select(fields).group(fields).where(:player_id => nil).\n c_select_all\n end.each_with_object(Set.new) do |row, set|\n set.add SolarSystemPoint.new(id, row['location_x'], row['location_y'])\n end\n end",
"def neighbours(radius = 1)\n (-radius..radius).each_with_object([]) do |x_diff, array|\n (-radius..radius).each do |y_diff|\n next if (x_diff.zero? && y_diff.zero?) ||\n x_diff + x < 0 ||\n y_diff + y < 0 ||\n x_diff + x > game.size ||\n y_diff + y > game.size\n array << game.find_square(x_diff + x, y_diff + y)\n end\n end\n end",
"def sun_locations(loc)\r\n range = (@width / 4) - 1\r\n loc1 = loc - range\r\n\tloc2 = loc + range\r\n\tloc1 = (@width + loc1) if loc1 < 1\r\n\tloc2 = loc2 - @width if loc2 > @width\r\n #puts \"\"\r\n\t#puts loc1\r\n\t#puts loc\r\n\t#puts loc2\r\n\t\r\n\t#t = [loc1,loc2]\r\n\t\r\n\tss = []\r\n\tif loc1 < @sun_location\r\n\t [*loc1...(@sun_location)].each do |k|\r\n\t ss << k\r\n\t end\r\n\telsif (@sun_location+1) < loc1\r\n\t [*loc1..@width].each do |k|\r\n\t ss << k\r\n\t end\r\n\t [*1..(@sun_location-1)].each do |k|\r\n\t ss << k\r\n\t end\r\n\tend\r\n\t\r\n\tif @sun_location < loc2\r\n\t [*(@sun_location+1)..loc2].each do |k|\r\n\t ss << k\r\n\t end\r\n\telsif loc2 < @sun_location\r\n\t [*1..loc2].each do |k|\r\n\t ss << k\r\n\t end\r\n\t [*(@sun_location+1)..@width].each do |k|\r\n\t ss << k\r\n\t end\r\n\tend\r\n\t#puts ss\r\n\tss\r\n end",
"def nearby_cities(*options)\n attributes = options.extract_options!\n range = ( attributes.fetch(:range) { DEFAULT_RANGE } ).to_f\n if city = (find_city_by(attributes) || fuzzy_find_city_by(attributes))\n if range == 0\n [city]\n else\n cities = Locationer::City.find_by_sql(cities_within_radius(city, range))\n if cities.present?\n center_city = (cities.select{|c| c.id == city.id}).first\n end\n cities\n end\n else\n []\n end\n end",
"def latlon\r\n [latitude,longitude]\r\n end",
"def latlon\r\n [latitude,longitude]\r\n end",
"def latlon\r\n [latitude,longitude]\r\n end",
"def c_radius \n Math.sqrt((@x*@x)+(@y*@y))\n end",
"def find_center\n center = []\n center[0] = (@west + @east) / 2\n center[1] = (@south + @north) / 2\n\n # Handle bounding boxes that cross the dateline\n center[0] -= 180 if @west > @east\n\n center\n end",
"def to_coordinates\n [latitude, longitude]\n end",
"def circle_nodes(width=@width, height=@height, nodeset=@nodes)\n center = Vector[width/2, height/2]\n radius = [width,height].min/2\n nodeset.each_with_index{|(key, node), i| nodeset[key].location = Vector[\n center[0] + (radius * Math.sin(Math::PI/4+2*Math::PI*i/nodeset.length)), \n center[1] - (radius * Math.cos(Math::PI/4+2*Math::PI*i/nodeset.length))] if !nodeset[key].static}\n end",
"def get_coords_from_node(node)\n\t\t[node.x,node.y]\n\tend",
"def as_polar\n [r, t]\n end",
"def to_a\r\n @locs.dup\r\n end",
"def getPolyCoordinates(radius, lat, lon, num_sides)\r\n # Create array to the radian values of the regular polygon\r\n rads = (0..2*Math::PI).step((2 * Math::PI) / num_sides.to_f).to_a\r\n\r\n # Compute angular distance (meters)\r\n ang_dist = (radius.to_f / EARTH_RAD_M.to_f) #*(Math::PI/180.0)\r\n lat_rad = lat * Math::PI/180.0\r\n lon_rad = lon * Math::PI/180.0\r\n\r\n # Constants\r\n a = Math.sin(lat_rad) * Math.cos(ang_dist)\r\n b = Math.cos(lat_rad) * Math.sin(ang_dist)\r\n\r\n # Loop over radians (clockwise) to compute geo points of the polygon\r\n points = Array.new\r\n cnt = 0\r\n rads.each do |r|\r\n # new point latitude (in radians)\r\n new_lat = Math.asin(a + b * Math.cos(r)) \r\n\r\n # new point longitude (in radians)\r\n c = Math.cos(ang_dist) - Math.sin(lat_rad) * Math.sin(new_lat)\r\n new_lon = lon_rad + Math.atan2(Math.sin(r) * Math.sin(ang_dist) * Math.cos(lat_rad), c)\r\n\r\n # Store (in degrees)\r\n points[cnt] = {:lat => new_lat*180.0/Math::PI, :lon => new_lon*180.0/Math::PI}\r\n cnt += 1\r\n end\r\n\r\n return points\r\n end",
"def location\n if [latitude, longitude].all?(&:present?)\n [latitude, longitude]\n end\n end",
"def to_array\n @points.collect { |p| [p.x, p.y] }\n end",
"def get_coordinates(pos)\n row = (pos / @width) + 1\n col = (pos % @width) + 1\n [row, col]\n end",
"def building_coordinates(x, y, height, width)\n puts\"building #{height}x#{width} at location (#{x},#{y})\"\n coords = Array.new\n (0..width-1).each do |j|\n\n (0..height-1).each do |i|\n\n co = Coordinate.new(x+i,y+j)\n coords.push(co)\n end\n\n end\n puts\"*getBuildingCoordinates* returning array of building coordinates\"\n coords\n end",
"def get_marker()\r\n center = Geom::Point3d.new(0, 0, 0)\r\n rotate_around_vector = Geom::Vector3d.new(0, 0, 1)\r\n angle = 14.4.degrees\r\n tr = Geom::Transformation.rotation(center, rotate_around_vector, angle)\r\n vector = Geom::Vector3d.new(@radius, 0, 0)\r\n 26.times.map {center + vector.transform!(tr) }\r\n end",
"def marker_coords(marker)\n row = @grid.find_index { |x| x.include?(marker) }\n col = @grid[row].index(marker)\n\n [col, row]\n end",
"def find_nearby\n locations = read_locations_from FILE\n locations.select_within_radius 100 \n locations.sort.filter_fields\n end",
"def to_a\n [x_point,y_point]\n end",
"def locations\n return @locations\n end",
"def locations\n return @locations\n end",
"def to_a\n [lat,lng]\n end",
"def pos_to_a\r\n [ pos.x, pos.y ]\r\n end",
"def locations_surrounding_coordinates(location)\n\n possible_coordinates = possible_offsets.map do |offset|\n [(location.x_coordinate+offset[0]),(location.y_coordinate+offset[1])]\n end\n\n possible_coordinates.select do |coordinate|\n coordinate[0] > -1 && coordinate[0] < @field.height && coordinate[1] > -1 && coordinate[1] < @field.width\n end\n\n end",
"def lonlat\r\n [longitude,latitude]\r\n end",
"def lonlat\r\n [longitude,latitude]\r\n end",
"def to_a\n [lat, lng]\n end",
"def to_a\n [lat, lng]\n end",
"def circle_info(radius)\n {:area => Math::PI * (radius * radius),\n :perimeter => Math::PI * (2 * radius),\n :sphere_volume => ((4/3)* Math::PI * (radius ** 3)),\n :sphere_surface_area => (4* (Math::PI * (radius * radius)))}\nend",
"def nearbys(radius = 20, options = {})\n return [] unless geocoded?\n options = {:conditions => [\"id != ?\", id]}.merge(options)\n self.class.near(read_coordinates, radius, options) - [self]\n end",
"def get_coords(_ip_addr)\n g = GeoIP.new('GeoLiteCity.dat').city(_ip_addr)\n [g.latitude, g.longitude]\n end",
"def compute_position\n # The negative and the ninety are the fudge to compensate for our map.\n lat = @latitude_radians = radians(-@latitude)\n long = @longitude_radians = radians(@longitude + 90)\n radius = $app.globe.diameter / 2.0 - 23\n @x = radius * cos(lat) * sin(long)\n @y = radius * sin(lat)\n @z = radius * cos(lat) * cos(long)\n end",
"def get_locations\n response = execute_get(\"/reference/location\")\n Location.from_array(decode(response))\n end",
"def nine_point_circle\n # Circle.new(*self.medial.vertices)\n end",
"def compute_center\n x_center = center_value(@points, :x)\n y_center = center_value(@points, :y)\n [x_center, y_center]\n end",
"def get_node_coords(node_city)\n @nodes[node_city].coords\n end",
"def arc(x, y, radius, angle1, angle2)\n [x.value, y.value, radius.value, angle1.value * (Math::PI / 180.0), angle2.value * (Math::PI / 180.0)]\n end",
"def auto_center\n \treturn nil unless @markers\n return @markers.first.position if @markers.length == 1\n \tmaxlat, minlat, maxlon, minlon = Float::MIN, Float::MAX, Float::MIN, Float::MAX\n \t@markers.each do |marker| \n \t\tif marker.lat > maxlat then maxlat = marker.lat end\n \t\tif marker.lat < minlat then minlat = marker.lat end\n \t\tif marker.lon > maxlon then maxlon = marker.lon end \n \t\tif marker.lon < minlon then minlon = marker.lon end\n \tend\n \treturn [((maxlat+minlat)/2), ((maxlon+minlon)/2)]\n end",
"def circle radius, foreground, background\n coords = [-radius, -radius, radius, radius]\n \"oval#{coords.join(\",\")},fc:#{foreground},oc:#{background}\"\nend",
"def sensors\n result = []\n ['f','r',nil,'l'].each do |d|\n if !d\n result << nil\n next\n end\n ny, nx = near_xy(d)\n result << @labirint[ny][nx]\n end\n result\n end",
"def get_available_positions\n\t\tpositions = []\n\t\tfor i in (1..9) do\n\t\t\tx = ((i - 0.1) / 3).truncate\n\t\t\ty = (i - 1) % 3\n\t\t\tif self.is_valid?(x, y)\n\t\t\t\tpositions.push(i)\n\t\t\tend\n\t\tend\n\t\t\n\t\treturn positions\n\tend",
"def get_targets_nearby( range=1, type =\"ALL\" )\n coords = []\n if YGG::FULL_FIELD_SCAN\n coords.concat( YGG.create_range_data( range ) )\n else\n case direction\n when 2\n for i in 0..range\n coords.push( [0, i] )\n end\n when 4\n for i in 0..range\n coords.push( [-i, 0] )\n end\n when 6\n for i in 0..range\n coords.push( [i, 0] )\n end\n when 8\n for i in 0..range\n coords.push( [0, -i] )\n end\n end\n end\n for coo in coords\n o = ygg_get_targets( coo[0]+self.x, coo[1]+self.y, type, true )\n return o unless o.empty?()\n end\n return []\n end",
"def point_in_circle(p, c)\n difference_v = T_VECTORS.pop.set(p) - c.position\n radius_sq = c.r * c.r\n distance_sq = difference_v.lengthsq\n T_VECTORS.push(difference_v)\n # If the distance between is smaller than the radius then the point is inside the circle.\n distance_sq <= radius_sq\n end",
"def position\n [x, y]\n end",
"def get_lots_coordinates\n xml = get_lots\n coords = xml.xpath '/ArrayOflot/lot/latitude | /ArrayOflot/lot/longitude'\n coords.each_slice(2).to_a.map { |p| [p[0].text, p[1].text] }\n end",
"def coords; {:x => @x, :y => @y} end",
"def areas_by_radius(lat, lon, radius_meters)\n #get min/max latitude and longitude of radius around point\n min_lat, min_lon, max_lat, max_lon = radius_box = bounding_box(lat, lon, radius_meters)\n\n #estimate the size of boxes to target\n steps = estimate_steps_by_radius(radius_meters)\n #re-encode point using steps\n #the geohashes are composed of 32 distinct numbers/letters, basically base 32\n #bits are composed of 1s and 0s, base 2 or binary\n #steps is the length of the binary number for longitude and latitude, and the combined length of the binary string (which interleaves both the longitude and latitude) is 2*steps\n # since 32 is 2^5, while 2 is 2^1, the length of a base 32 number will be the length of a binary number divided by 5 and plus 1 (32 base 10 = 10000 base 2 = 10 base 32).\n str_len = steps*2/5 + 1\n hash = encode(lat, lon, str_len)\n\n #get neighbors of box\n neighbors = neighbors(hash)\n neighbors_neighbors = neighbors.each_with_object([]) {|neighbor, nb| nb << neighbors(neighbor)}\n\n # 25 geohashes surrounding the original\n nb = neighbors_neighbors.flatten.uniq\n\n # remove those geohashes that are outside the bounding box\n nb.each do |neighbor|\n n_latlng_low, n_latlng_high = decode(neighbor)\n if n_latlng_low[0] > max_lat or n_latlng_low[1] > max_lon or n_latlng_high[0] < min_lat or n_latlng_high[1] < min_lon\n nb -= [neighbor]\n end\n end\n\n #return remaining neighbor list\n nb\n end"
] | [
"0.68248504",
"0.67919993",
"0.64993674",
"0.64616084",
"0.6434406",
"0.6395247",
"0.6297898",
"0.62888646",
"0.62259",
"0.62119913",
"0.61935437",
"0.6125707",
"0.6115966",
"0.6093299",
"0.60277575",
"0.6024512",
"0.6022414",
"0.60211587",
"0.59975594",
"0.59971297",
"0.59191686",
"0.5849601",
"0.584855",
"0.58404416",
"0.58371633",
"0.5836767",
"0.58225566",
"0.5780375",
"0.5780375",
"0.5752249",
"0.5730793",
"0.57290673",
"0.5726132",
"0.57247376",
"0.57190055",
"0.57133657",
"0.5672836",
"0.5664181",
"0.5658951",
"0.5657821",
"0.5657821",
"0.5636794",
"0.56349176",
"0.561398",
"0.5599624",
"0.5592089",
"0.55920863",
"0.55822474",
"0.5573018",
"0.55700856",
"0.55699795",
"0.5569582",
"0.55591065",
"0.555516",
"0.555516",
"0.555516",
"0.5552421",
"0.5550694",
"0.5549709",
"0.55473053",
"0.553691",
"0.5534116",
"0.55324537",
"0.55206054",
"0.55193424",
"0.55176616",
"0.5511883",
"0.54909784",
"0.5485262",
"0.5461292",
"0.5458149",
"0.545362",
"0.544401",
"0.544401",
"0.54431766",
"0.54410917",
"0.54408133",
"0.5436779",
"0.5436779",
"0.54352635",
"0.54352635",
"0.54352623",
"0.5430922",
"0.5427994",
"0.5425182",
"0.54242784",
"0.54232687",
"0.54001915",
"0.5398208",
"0.53970176",
"0.5394425",
"0.5378302",
"0.5364484",
"0.5359286",
"0.53570265",
"0.53559566",
"0.5353536",
"0.53421056",
"0.53402364",
"0.53374654"
] | 0.700229 | 0 |
Initialize a Terminal Color | def initialize(options = {})
@enabled = options.fetch(:enabled) { supports? }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize_colors\n Ncurses.start_color\n [[Ncurses::COLOR_RED, Ncurses::COLOR_WHITE],\n [Ncurses::COLOR_GREEN, Ncurses::COLOR_BLACK],\n [Ncurses::COLOR_YELLOW, Ncurses::COLOR_BLACK],\n [Ncurses::COLOR_BLUE, Ncurses::COLOR_WHITE],\n [Ncurses::COLOR_MAGENTA, Ncurses::COLOR_BLACK],\n [Ncurses::COLOR_CYAN, Ncurses::COLOR_BLACK],\n [Ncurses::COLOR_WHITE, Ncurses::COLOR_BLACK]\n ].each.with_index(1) do |pair, i|\n # Initialize color.\n Ncurses.init_pair(i, pair[0], Ncurses::COLOR_BLACK)\n # Initialize reversed color.\n Ncurses.init_pair(i + 8, pair[1], pair[0])\n end\n Ncurses.init_pair(8, Ncurses::COLOR_BLACK, Ncurses::COLOR_WHITE)\n end",
"def initialize(color=0x000000)\n string = \"%.6x\" % color\n @r = string[0..1].hex\n @g = string[2..3].hex\n @b = string[4..5].hex\n end",
"def initialize\n @colours = default_colours\n @default = \"\\033[92m\"\n\n yield self if block_given?\n end",
"def start_color\n \"\\033[\"\n end",
"def initialize(color = COLOR_CLEAR)\n @color = color\n end",
"def initialize\n @color = :none\n @symbol = \" \"\n end",
"def initialize\r\n\t\t@color =\"platinum\"\r\n\tend",
"def start_color color\n \"\\e[#{COLORS[color]}m\"\n end",
"def initialize(color)\n @color = color\n end",
"def initialize(red, green, blue, alpha=255)\n set red, green, blue, alpha\n end",
"def initialize(color, position)\n super\n @color == \"white\" ? @symbol = \"♖\" : @symbol = \"♜\"\n end",
"def initialize(color={:cyan=> 1 ,:magenta => 0, :yellow => 0, :black => 0})\n @color=color\n end",
"def initialize(color, position)\n super\n @color == \"white\" ? @symbol = \"♔\" : @symbol = \"♚\"\n end",
"def std_colors\n FFI::NCurses.use_default_colors\n # 2018-03-17 - changing it to ncurses defaults\n FFI::NCurses.init_pair(0, FFI::NCurses::BLACK, -1)\n FFI::NCurses.init_pair(1, FFI::NCurses::RED, -1)\n FFI::NCurses.init_pair(2, FFI::NCurses::GREEN, -1)\n FFI::NCurses.init_pair(3, FFI::NCurses::YELLOW, -1)\n FFI::NCurses.init_pair(4, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(5, FFI::NCurses::MAGENTA, -1)\n FFI::NCurses.init_pair(6, FFI::NCurses::CYAN, -1)\n FFI::NCurses.init_pair(7, FFI::NCurses::WHITE, -1)\n # ideally the rest should be done by application\n #FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, -1)\n #FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n\n FFI::NCurses.init_pair(14, FFI::NCurses::WHITE, FFI::NCurses::CYAN)\n=begin\n FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::GREEN)\n FFI::NCurses.init_pair(11, FFI::NCurses::BLACK, FFI::NCurses::YELLOW)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n FFI::NCurses.init_pair(14, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(15, FFI::NCurses::BLACK, FFI::NCurses::WHITE)\n=end\n end",
"def initialize(color, display)\n @color = color\n @display = display\n end",
"def init_colors\n $desc_color = \"#{GREEN}\" # color of description portion\n # color the title based on priority\n $p5color = \"#{BLUE}#{BOLD}\" \n $p4color = \"#{MAGENTA}\" \n $p3color = \"#{CYAN}#{BOLD}\" \n $p2color = \"#{BOLD}\"\n $p1color = \"#{YELLOW}#{ON_RED}\"\n #\n # color for only the type column\n $bugcolor = \"#{BLACK}#{ON_RED}\"\n $enhcolor = \"#{GREEN}\"\n $feacolor = \"#{CYAN}\"\n\n # color for row of started event\n $startedcolor = \"#{STANDOUT}\"\n\n cols = %x[tput colors] rescue 8\n cols = cols.to_i\n if cols >= 256\n $desc_color = \"\\x1b[38;5;236m\" # 256 colors, grey\n $p5color = \"\\x1b[38;5;57m\" # some kinda blue\n $p4color = \"\\x1b[38;5;239m\" # grey. 256 colors\n $p3color = \"\\x1b[38;5;244m\" # grey, 256 colors\n end\n end",
"def initialize(color) \n @color = color #instance variable\n end",
"def colorNormal\n puts \"\\033[0m\"\n end",
"def colorize *args\n $terminal.color(*args)\nend",
"def init\r\n\r\n Curses.noecho\r\n Curses.start_color\r\n Curses.init_pair(COLOR_WHITE, COLOR_WHITE, COLOR_BLACK)\r\n Curses.init_pair(COLOR_RED, COLOR_RED, COLOR_WHITE)\r\n\r\n @@now = Time.now\r\n @@before_time = @@now\r\n\r\n @@limit = @@max_limit\r\n\r\n end",
"def initialize(pos, color)\n if color == 0\n @color = \"B\"\n else\n @color = \"W\"\n end\n @pos = pos\n end",
"def initialize(*c)\n if !c[0] then @color = pick_color else @color = c[0] end\n end",
"def add_colors\n\tNcurses.start_color\n\tcolors = %w[RED BLUE GREEN MAGENTA CYAN YELLOW]\n\tcolors.each { |color|\n\t\teval \"Ncurses.init_color( Ncurses::COLOR_#{color}, #{rand(0..1000)}, #{rand(0..1000)}, #{rand(0..1000)} )\"\n\t}\n\t#Ncurses.init_pair( PAIR_NUMBER, BORDER_LINE_COLOR, BORDER_COLOR)\n\trandom_color = eval \"Ncurses::COLOR_#{colors.sample}\"\n\tNcurses.init_pair(2, random_color, Ncurses::COLOR_RED)\n\tNcurses.init_pair(3, random_color, Ncurses::COLOR_BLUE)\n\tNcurses.init_pair(4, random_color, Ncurses::COLOR_GREEN)\n\tNcurses.init_pair(5, random_color, Ncurses::COLOR_MAGENTA)\n\tNcurses.init_pair(6, random_color, Ncurses::COLOR_CYAN)\n\tNcurses.init_pair(7, random_color, Ncurses::COLOR_YELLOW)\nend",
"def initialize(red, green, blue, alpha = 1.0); end",
"def initialize(color, pos, board)\n super(color, pos, board)\n @value = \"\\u265C \"\n end",
"def initialize(status, contents)\n @status = status\n @contents = contents\n @colorscheme = {}\n set_colorscheme\n end",
"def start!\n @color = @@colors[:green]\n end",
"def color(color_code)\n colors = color_code.scan(/\\d+/)\n\n # Extended set foreground x-term color\n if colors[0] == \"38\" && colors[1] == \"5\"\n return @fg_color = \"term-fgx#{colors[2]}\"\n end\n\n # Extended set background x-term color\n if colors[0] == \"48\" && colors[1] == \"5\"\n return @bg_color = \"term-bgx#{colors[2]}\"\n end\n\n # If multiple colors are defined, i.e. \\e[30;42m\\e then loop through each\n # one, and assign it to @fg_color or @bg_color\n colors.each do |cc|\n c_integer = cc.to_i\n\n # Reset all styles\n if c_integer == 0\n @fg_color = nil\n @bg_color = nil\n @other_colors = []\n\n # Primary (default) font\n elsif c_integer == 10\n # no-op\n\n # Turn off bold / Normal color or intensity (21 & 22 essentially do the same thing)\n elsif c_integer == 21 || c_integer == 22\n @other_colors.delete(\"term-fg1\")\n @other_colors.delete(\"term-fg2\")\n\n # Turn off italic\n elsif c_integer == 23\n @other_colors.delete(\"term-fg3\")\n\n # Turn off underline\n elsif c_integer == 24\n @other_colors.delete(\"term-fg4\")\n\n # Turn off crossed-out\n elsif c_integer == 29\n @other_colors.delete(\"term-fg9\")\n\n # Reset foreground color only\n elsif c_integer == 39\n @fg_color = nil\n\n # Reset background color only\n elsif c_integer == 49\n @bg_color = nil\n\n # 30–37, then it's a foreground color\n elsif c_integer >= 30 && c_integer <= 37\n @fg_color = \"term-fg#{cc}\"\n\n # 40–47, then it's a background color.\n elsif c_integer >= 40 && c_integer <= 47\n @bg_color = \"term-bg#{cc}\"\n\n # 90-97 is like the regular fg color, but high intensity\n elsif c_integer >= 90 && c_integer <= 97\n @fg_color = \"term-fgi#{cc}\"\n\n # 100-107 is like the regular bg color, but high intensity\n elsif c_integer >= 100 && c_integer <= 107\n @fg_color = \"term-bgi#{cc}\"\n\n # 1-9 random other styles\n elsif c_integer >= 1 && c_integer <= 9\n @other_colors << \"term-fg#{cc}\"\n end\n end\n end",
"def system_color\n return Color.new(255,255,0)\n end",
"def initialize(color, pos, board)\n super(color, pos, board)\n @value = \"\\u265E \"\n end",
"def initialize text=\" \", **kwargs\n @color = kwargs[:color] || [0,0,0]\n @font = kwargs[:font] || Default\n self.text= text\n end",
"def open!\n @color = @@colors[:cyan]\n end",
"def color=(c)\n @color = Color.new(c)\n end",
"def initialize(style=nil, color=nil)\n @line_character = nil\n @line_length = nil\n @target_style = nil\n @target_foreground = nil\n @target_background = nil\n @task_style = nil\n @task_foreground = nil\n @task_background = nil\n @success_style = nil\n @success_foreground = nil\n @success_background = nil\n @error_style = nil\n @error_foreground = nil\n @error_background = nil\n apply(color ? COLOR_STYLE : DEFAULT_STYLE)\n apply(style)\n end",
"def initialize(number, color)\n @number = number\n @color = color\n end",
"def initialize(opts={})\n super(opts)\n @fg = opts[:fg]\n @bg = opts[:bg]\n\n if @fg and not COLORS.has_key?(@fg)\n raise ArgumentError.new \"fg :#{@fg} is not a valid color\"\n end\n\n if @bg and not COLORS.has_key?(@bg)\n raise ArgumentError.new \"bg :#{@bg} is not a valid color\"\n end\n\n # IO handle to use as the console\n @io = opts[:io] || STDERR\n\n # from https://github.com/sickill/rainbow/blob/master/lib/rainbow.rb\n @enabled = @io.tty? && ENV['TERM'] != 'dumb' || ENV['CLICOLOR_FORCE'] == '1'\n\n add_reader { |line,src| display(src, line) }\n end",
"def initialize(color)#recibe el color de brown\n p super# trae el comportamiento de la superclase con los valores de name\n p @color = color #el valor de colo lo declaramos como variable de instancia\n end",
"def reset_colors\n @color_output = false\n\n #Term::ANSIColor.coloring = true\n c = Term::ANSIColor\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n end",
"def set_colors\n if @color_output \n @c_app_info = @color_app_info\n @c_app_exe = @color_app_exe\n @c_command = @color_command\n @c_description = @color_description\n @c_parameter = @color_parameter\n @c_usage = @color_usage\n \n @c_error_word = @color_error_word\n @c_error_name = @color_error_name\n @c_error_description = @color_error_description\n \n @c_bold = @color_bold\n @c_reset = @color_reset\n else\n @c_app_info, @c_app_exe, @c_command, \n @c_description, @c_parameter, @c_usage, \n @c_bold, @c_reset, @c_error_word, \n @c_error_name, @c_error_description = [\"\"]*11\n end\n end",
"def initialize(x, y, c=\"red\")\n\t\tsuper(x, y)\n\t\t@color = c\n\tend",
"def initialize\n FFI::NCurses.initscr\n #FFI::NCurses.start_color\n FFI::NCurses.curs_set(0)\n FFI::NCurses.nodelay(FFI::NCurses.stdscr, true)\n FFI::NCurses.cbreak\n FFI::NCurses.raw\n FFI::NCurses.noecho\n FFI::NCurses.start_color\n FFI::NCurses.clear\n end",
"def new\n @color = Color.new\n end",
"def init_style\n if Curses.start_color then\n Curses.use_default_colors\n @colors = true\n # This is a a hack. I'm abusing the COLOR_ constants to make a color on\n # default background color pair.\n Curses.init_pair Curses::COLOR_CYAN, Curses::COLOR_CYAN, -1\n Curses.init_pair Curses::COLOR_GREEN, Curses::COLOR_GREEN, -1\n Curses.init_pair Curses::COLOR_WHITE, Curses::COLOR_WHITE, -1\n\n @link_style = Curses.color_pair(Curses::COLOR_CYAN) | Curses::A_UNDERLINE\n @hover_style =\n Curses.color_pair(Curses::COLOR_WHITE) | Curses::A_BOLD |\n Curses::A_UNDERLINE\n else\n @link_style = Curses::A_UNDERLINE\n @hover_style = Curses::A_BOLD\n end\n end",
"def color(color); end",
"def red\n colorize \"\\033[31m\"\n end",
"def colorize!(color_code) \"#{COLORS[color_code]}#{self.to_s}\\e[0m\" ; end",
"def reset_colors\n @color_output ||= true\n\n # Build the default colors\n Term::ANSIColorHI.coloring = color_output\n c = Term::ANSIColorHI\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n @screen_clear = \"\\e[H\\e[2J\"\n end",
"def set_colors\n if color_output\n @c_app_info = @color_app_info\n @c_app_exe = @color_app_exe\n @c_command = @color_command\n @c_description = @color_description\n @c_parameter = @color_parameter\n @c_usage = @color_usage\n \n @c_error_word = @color_error_word\n @c_error_name = @color_error_name\n @c_error_description = @color_error_description\n \n @c_bold = @color_bold\n @c_reset = @color_reset\n else\n @c_app_info, @c_app_exe, @c_command, @c_description,\n @c_parameter, @c_usage, @c_bold, @c_reset, @c_error_word,\n @c_error_name, @c_error_description = [\"\"]*12\n end\n end",
"def set_bg\n STDOUT.write \"\\033[48;5;#{to_xterm}m\"\n end",
"def color(color=32)\n printf \"\\033[#{color}m\"\n yield\n printf \"\\033[0m\"\nend",
"def cyan; if @options[:colors]; \"\\e[1;36m\" else \"\" end end",
"def normal_color\n #return Color.new(255,255,255)\n end",
"def initialize(x,y,c=\"clear\")\n super(x,y) # keyword super calls same method in superclass\n @color = c\n end",
"def initialize(color:, **opts)\n super(**opts)\n @color = color\n end",
"def nc\n Ncurses::COLOR_PAIR(@id)\n end",
"def activate_256_colors\n require 'ruco/file_store'\n (\n # not windows\n RbConfig::CONFIG['host_os'] !~ /mswin|mingw/ and\n\n # possible to open xterm-256color\n ['xterm', 'xterm-256color'].include?(ENV['TERM']) and\n Ruco::FileStore.new('~/.ruco/cache').cache('color_possible'){\n system(%{TERM=xterm-256color ruby -r curses -e 'Curses.noecho' > /dev/null 2>&1})\n }\n\n # finally switch terminal, so curses knows we want colors\n ) and ENV['TERM'] = 'xterm-256color'\nend",
"def initialize(fg, bg, *tags)\n @@id ||= 0\n @@id += 1\n @id = @@id\n\n @tags = Set.new(tags)\n\n # FIXME need to verify input is in valid domain\n # before conveRETermng it to symbol w/ \"intern\"\n fg = fg.to_s.downcase.intern if fg.is_a?(String) || fg.is_a?(Symbol)\n bg = bg.to_s.downcase.intern if bg.is_a?(String) || bg.is_a?(Symbol)\n @fg, @bg = fg, bg\n\n fgc = fg.is_a?(Symbol) ? Ncurses.const_get(\"COLOR_#{fg.to_s.upcase}\") : fg\n bgc = bg.is_a?(Symbol) ? Ncurses.const_get(\"COLOR_#{bg.to_s.upcase}\") : bg\n\n @fgc, @bgc = fgc, bgc\n\n Ncurses.init_pair(@id, fgc, bgc)\n end",
"def blue = \"\\e[36m#{self}\\e[0m\"",
"def color(options)\n set RGhost::Color.create(options)\n end",
"def colorize(*args)\n shell.set_color(*args)\n end",
"def initialize(color)\n assert_type 'color', color, Sass::Script::Color\n @color = color\n end",
"def color_new\n return GT::Color.malloc\n end",
"def color_new\n return GT::Color.malloc\n end",
"def post_initialize(args)\n @tape_color = args[:tape_color]\n end",
"def post_initialize(args)\n @tape_color = args[:tape_color]\n end",
"def colorize(params)\n return self unless STDOUT.isatty\n\n begin\n require \"Win32/Console/ANSI\" if RUBY_PLATFORM.match?(/win32/)\n rescue LoadError\n raise \"You must gem install win32console to use colorize on Windows\"\n end\n\n color_parameters = {}\n\n if params.instance_of?(Hash)\n color_parameters[:color] = COLORS[params[:color]]\n color_parameters[:background] = COLORS[params[:background]]\n color_parameters[:mode] = MODES[params[:mode]]\n elsif params.instance_of?(Symbol)\n color_parameters[:color] = COLORS[params]\n end\n\n color_parameters[:color] ||= @color ||= COLORS[:default]\n color_parameters[:background] ||= @background ||= COLORS[:default]\n color_parameters[:mode] ||= @mode ||= MODES[:default]\n\n color_parameters[:uncolorized] ||= @uncolorized ||= dup\n\n # calculate bright mode\n color_parameters[:color] += 50 if color_parameters[:color] > 10\n\n color_parameters[:background] += 50 if color_parameters[:background] > 10\n\n \"\\033[#{color_parameters[:mode]};#{color_parameters[:color] + 30};\"\\\n \"#{color_parameters[:background] + 40}m#{color_parameters[:uncolorized]}\\033[0m\"\\\n .color_parameters(color_parameters)\n end",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def hsla_color; end",
"def from_rgb(red, green, blue)\n Inker.color(\"rgb(#{red}, #{green}, #{blue})\")\n end",
"def initialize(name,bg_color,text_color,font_name,prompt)\n @name = name\n @bgColor = bg_color\n @textColor = text_color\n @fontName = font_name\n @prompt = prompt\n @active = false\n end",
"def color\n @color ||= COLORS[label.length%COLORS.length].to_sym\n end",
"def reset\n # color is enabled by default, can be turned of by switch --no-color\n Term::ANSIColor.coloring = true\n end",
"def on_49(_) { fg: fg_color(9) } end",
"def get_color_code\n\t\t{ r: @term_hex[0], g: @term_hex[1], b: @term_hex[2], alpha: @term_hex[-1] }\n\tend",
"def normal_color\n return Color.new(255, 255, 255)\n end",
"def post_initialize(args)\n @tape_color = args[:tape_color]\n end",
"def text_color(n)\n case n\n when Integer then super(n)\n when Array then Color.new(*n)\n when Color then n\n else super(0)\n end\n end",
"def color(red, green, blue)\n r = red << 4\n g = green << 5\n b = blue << 6\n send(RESET ^ (r | g | b))\n rescue\n end",
"def init_screen\n\t\t@screen = Curses.init_screen\n\t\tCurses.start_color\n\t\tCurses.stdscr.keypad(true)\n\t\tCurses.init_pair(Curses::COLOR_GREEN, Curses::COLOR_GREEN, Curses::COLOR_BLACK)\n\t\tCurses.init_pair(Curses::COLOR_RED, Curses::COLOR_RED, Curses::COLOR_BLACK)\n\t\tCurses.init_pair(Curses::COLOR_WHITE, Curses::COLOR_WHITE, Curses::COLOR_BLACK)\n\t\tCurses.init_pair(Curses::COLOR_CYAN, Curses::COLOR_CYAN, Curses::COLOR_BLACK)\n\t\tCurses.init_pair(Curses::COLOR_BLUE, Curses::COLOR_BLUE, Curses::COLOR_BLACK)\n\t\tCurses.init_pair(Curses::COLOR_YELLOW, Curses::COLOR_YELLOW, Curses::COLOR_BLACK)\n\t\tCurses.init_pair(Curses::COLOR_MAGENTA, Curses::COLOR_MAGENTA, Curses::COLOR_BLACK)\n\t\tbegin\n\t\t\tyield\n\t\tensure\n\t\t\tCurses.close_screen\n\t\tend\n\tend",
"def initialize(number = nil, symbol = nil, shading = nil, color = nil)\n @number = number\n @symbol = symbol\n @shading = shading\n @color = color\n end",
"def create_color_pair(bgcolor, fgcolor)\n code = (bgcolor*10) + fgcolor\n FFI::NCurses.init_pair(code, fgcolor, bgcolor)\n return code\n end",
"def initialize(color_or_red=nil,green=nil,blue=nil)\n @color=color_or_red\n @color=[color_or_red.to_f,green.to_f,blue.to_f] if color_or_red.is_a? Numeric\n @color=DEFAULT_RGB.merge(color_or_red) if color_or_red.is_a? Hash\n \n end",
"def in_color(text, color)\n # TODO: Disable if the output is not a terminal.\n return text unless enabled?\n # Source of color codes:\n # https://misc.flogisoft.com/bash/tip_colors_and_formatting\n code =\n case color\n when :red then 31\n when :green then 32\n when :yellow then 33\n when :blue then 34\n when :magenta then 35\n when :cyan then 36\n when :light_gray then 37\n when :dark_gray then 90\n when :light_red then 91\n when :light_green then 92\n when :light_yellow then 93\n when :light_blue then 94\n when :light_magenta then 95\n when :light_cyan then 96\n when :white then 97\n when :default_background then 49\n when :black_background then 40\n when :red_background then 41\n when :green_background then 42\n when :yellow_background then 43\n when :blue_background then 44\n when :magenta_background then 45\n when :cyan_background then 46\n when :light_gray_background then 47\n when :dark_gray_background then 100\n when :light_red_background then 101\n when :light_green_background then 102\n when :light_yellow_background then 103\n when :light_blue_background then 104\n when :light_magenta_background then 105\n when :light_cyan_background then 106\n when :white_background then 107\n else raise \"Unrecognized color: #{color}\"\n end\n \"\\e[#{code}m#{text}\\e[0m\"\n end",
"def system_color\n return Color.new(192, 224, 255)\n end",
"def initialize(color, conf=\"bbbbbbbbbbbb wwwwwwwwwwww\")\n @must_learn = true\n @color = color.to_s\n set_conf(conf)\n end",
"def initialize( text, icon1, icon2, color, data )\n\t\t\tsuper( text, icon1, icon2, data )\n\t\t\t@color = color\n\t\tend",
"def output_color(text, color=text.to_i)\r\n # Color matches: 1 - Black; 2 - White; 3 - Red; 4 - Yellow; 5 - Green; 6 - Blue; 7 - Gold\r\n colors = { 1 => 30, 2 => 36, 3 => 31, 4 => 33, 5 => 35, 6 => 34, 7 => 220 }\r\n # \\e[47m Is for the grey foreground \\e[{color} is for picking the color and \\e[0m is for resetting the terminal.\r\n \"\\e[1m\\e[47m\\e[#{colors[color]}m#{text}\\e[0m\\e[22m\"\r\n end",
"def to_color color\n \"\\x1b[#{COLOR_CODE[color]}m#{to_s}\\x1b[m\"\n end",
"def color(color_code, str)\n tty? ? str : \"\\033[#{color_code}m#{str}\\033[0m\"\n end",
"def initialize(color, position)\n self.color = color\n self.position = position\n end",
"def setcolorrep(*)\n super\n end",
"def initialize(color = \"white\", name = \"n\")\n @color = COLORS[color]\n @name = name\n @selected = false\n end",
"def set(*args)\n val = Color.parse(*args)\n unless val.nil?\n self.r = val.r\n self.g = val.g\n self.b = val.b\n self.a = val.a\n end\n self\n end",
"def color(text, color)\n if COLORS[color]\n \"#{start_color color}#{text}#{reset_color}\"\n end\n end",
"def parse_colors(s)\n\n line = \"\"\n\n s.each_char do |c|\n line.concat(@colors[c]) if @colors.has_key?(c)\n line.concat(\" \")\n end\n\n line.concat(\"\\033[0m\")\n end",
"def color(*args)\n @instructions << Instruction.new(:color, args)\n self\n end",
"def colorize( params )\n return self unless STDOUT.isatty\n return self if self.frozen?\n\n begin\n require 'Win32/Console/ANSI' if RUBY_PLATFORM =~ /win32/\n rescue LoadError\n raise 'You must gem install win32console to use colorize on Windows'\n end\n\n color_parameters = {}\n\n if (params.instance_of?(Hash))\n color_parameters[:color] = COLORS[params[:color]]\n color_parameters[:background] = COLORS[params[:background]]\n color_parameters[:mode] = MODES[params[:mode]]\n elsif (params.instance_of?(Symbol))\n color_parameters[:color] = COLORS[params]\n end\n\n color_parameters[:color] ||= @color ||= COLORS[:default]\n color_parameters[:background] ||= @background ||= COLORS[:default]\n color_parameters[:mode] ||= @mode ||= MODES[:default]\n\n color_parameters[:uncolorized] ||= @uncolorized ||= self.dup\n\n # calculate bright mode\n color_parameters[:color] += 50 if color_parameters[:color] > 10\n\n color_parameters[:background] += 50 if color_parameters[:background] > 10\n\n \"\\033[#{color_parameters[:mode]};#{color_parameters[:color]+30};#{color_parameters[:background]+40}m#{color_parameters[:uncolorized]}\\033[0m\".set_color_parameters( color_parameters )\n end",
"def setup\n super\n @background_colour = @colour\n @foreground_colour = Palette.white\n end",
"def initialize color, size\n @color = color\n @size = size\n end",
"def set_color\n self.color = [\"#7AD8E5\", \"#63b4d1\", \"6da7d3\", \"#7699d4\", \"#816ec4\", \"#8658bc\", \"#602278\", \"#34023C\"].sample\n end"
] | [
"0.73090994",
"0.7153503",
"0.71362793",
"0.7011966",
"0.6879383",
"0.67821366",
"0.6780507",
"0.6735075",
"0.67218095",
"0.6625582",
"0.6613872",
"0.66080594",
"0.65918624",
"0.65684193",
"0.6524045",
"0.65139604",
"0.6511712",
"0.6473946",
"0.63969326",
"0.6395496",
"0.63700324",
"0.63346475",
"0.6333756",
"0.6318774",
"0.6305061",
"0.62621766",
"0.62554526",
"0.6244338",
"0.6243098",
"0.6239142",
"0.6224723",
"0.6219008",
"0.62145424",
"0.6214131",
"0.62118095",
"0.6209419",
"0.6201877",
"0.61944795",
"0.61892635",
"0.6183873",
"0.6173243",
"0.61664975",
"0.6164228",
"0.6160842",
"0.61392635",
"0.6133713",
"0.6129666",
"0.6129188",
"0.61267936",
"0.61134815",
"0.6110663",
"0.61073005",
"0.60873836",
"0.6060728",
"0.60557854",
"0.6049923",
"0.6034048",
"0.6030386",
"0.60228235",
"0.6018792",
"0.60170174",
"0.60102063",
"0.60102063",
"0.6001915",
"0.6001915",
"0.5994643",
"0.59891033",
"0.59891033",
"0.598649",
"0.59859765",
"0.59741575",
"0.5973477",
"0.59693766",
"0.59687984",
"0.59650344",
"0.5963767",
"0.5961225",
"0.5960666",
"0.5954109",
"0.5952938",
"0.5946395",
"0.5934658",
"0.5932632",
"0.5932383",
"0.5914066",
"0.59134805",
"0.59046155",
"0.5901507",
"0.58941424",
"0.5891971",
"0.58855164",
"0.5878493",
"0.5870434",
"0.58632475",
"0.58600914",
"0.58565515",
"0.5848745",
"0.58471084",
"0.5845517",
"0.5844294",
"0.5843266"
] | 0.0 | -1 |
Disable coloring of this terminal session | def disable!
@enabled = false
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def no_color\n add option: \"-no-color\"\n end",
"def reset\n # color is enabled by default, can be turned of by switch --no-color\n Term::ANSIColor.coloring = true\n end",
"def no_color\n reset_prev_formatting self, :color\n end",
"def disable_color\n return translate_color(7)\n end",
"def raw_no_echo_mode\n FFI::NCurses.initscr\n FFI::NCurses.cbreak\n end",
"def disable_colorization=(value); end",
"def set_bg\n STDOUT.write \"\\033[48;5;#{to_xterm}m\"\n end",
"def colorNormal\n puts \"\\033[0m\"\n end",
"def reset\n STDOUT.write \"\\033[0m\"\n end",
"def strip_color\n return self.gsub(/\\e\\[0;[39]\\d;49m/, '').gsub(/\\e\\[0m/, '')\n end",
"def echo_off\n system \"stty -echo\"\n end",
"def off\n %x{stty -raw} if TTY::Platform.unix?\n end",
"def disable_colorization=(value)\n @disable_colorization = (value || false)\n end",
"def set_fg\n STDOUT.write \"\\033[38;5;#{to_xterm}m\"\n end",
"def turn_off\n STDOUT.puts \"\\e]2;\\a\"\n end",
"def safe_colorize_deactive\n CLIColorize.off\n end",
"def reset_color\n \"\\e[#{COLORS[:reset]}m\"\n end",
"def raw_no_echo_mode\n @state = `stty -g`\n system \"stty raw -echo cbreak isig\"\n end",
"def off\n %x{stty -raw} rescue nil\n end",
"def no_color pry=(defined?(_pry_) && _pry_) || Pry\n boolean = pry.config.color\n pry.config.color = false\n yield\n ensure\n pry.config.color = boolean\n end",
"def raw_no_echo_mode\n @state = `stty -g`\n system \"stty raw -echo -icanon isig\"\n end",
"def disable_colorization(value = T.unsafe(nil)); end",
"def reset_colors\n @color_output = false\n\n #Term::ANSIColor.coloring = true\n c = Term::ANSIColor\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n end",
"def no_colors\n @style = {\n :title => nil,\n :header => nil,\n :value => nil\n }\n @no_colors = true\n end",
"def off\n color(:passive)\n end",
"def reset_colors\n @color_output ||= true\n\n # Build the default colors\n Term::ANSIColorHI.coloring = color_output\n c = Term::ANSIColorHI\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n @screen_clear = \"\\e[H\\e[2J\"\n end",
"def reset_use_color\n @use_color = true\n end",
"def no_bg_color\n reset_prev_formatting self, :bg_color\n end",
"def remove_colors\n gsub(/\\e\\[\\d+m/, '')\n end",
"def strip_color\n gsub(COLOR_REGEXP, '')\n end",
"def raw_no_echo_mode\n @state = Termios.getattr(@input)\n new_settings = @state.dup\n new_settings.c_lflag &= ~(Termios::ECHO | Termios::ICANON)\n new_settings.c_cc[Termios::VMIN] = 1\n Termios.setattr(@input, Termios::TCSANOW, new_settings)\n end",
"def decolorize!\n gsub!(/\\e\\[\\d+[;\\d]*m/, '')\n self\n end",
"def reset_terminal\n # Reset the terminal to a useable state (undo all changes).\n # '\\e[?7h': Re-enable line wrapping.\n # '\\e[?25h': Unhide the cursor.\n # '\\e[2J': Clear the terminal.\n # '\\e[;r': Set the scroll region to its default value.\n # Also sets cursor to (0,0).\n # '\\e[?1049l: Restore main screen buffer.\n print \"\\e[?7h\\e[?25h\\e[2J\\e[;r\\e[?1049l\"\n\n # Show user input.\n system 'stty echo'\nend",
"def start_color\n \"\\033[\"\n end",
"def no_color(&block)\n block.call\n end",
"def color_enabled?\n # if not set, use tty to check\n return $stdout.tty? unless instance_variable_defined?(:@disable_color)\n\n !@disable_color\n end",
"def remove_black_color(env)\n node = env[:node]\n return unless node.element?\n return unless node.attr('style').present?\n node['style'] = node['style'].gsub(/(?<!background-)(color:#000000;?)/, '')\n end",
"def red\n colorize \"\\033[31m\"\n end",
"def strip_color(text)\n text.to_s.gsub(/(\\001)?\\e\\[.*?(\\d)+m(\\002)?/, '')\n end",
"def strip_color_codes(text)\n text.gsub(/\\e\\[(\\d+)(;\\d+)*m/, '')\n end",
"def decolorize!\n gsub! /\\e\\[\\d+[;\\d]*m/, ''\n self\n end",
"def in_xterm_state(options = {})\n output.echo_off\n output.enable_alternate_screen if options[:full] || options[:alternate_screen]\n output.enable_mouse if options[:full] || options[:mouse]\n output.hide_cursor if options[:full] || options[:no_cursor]\n output.enable_utf8 if options[:utf8]\n output.enable_focus_events\n output.enable_resize_events\n output.clear\n\n yield self\n ensure\n output.reset_all\n output.disable_utf8 if options[:utf8]\n if options[:full] || options[:alternate_screen]\n output.reset_color\n output.clear\n output.disable_alternate_screen\n end\n end",
"def clear_screen!\n print \"\\e[2J\"\n end",
"def colored_prompt\n return (Readline::VERSION !~ /EditLine/) && Pry.color if @colored_prompt.nil?\n\n @colored_prompt\n end",
"def disabled_color\n return Color.new(255, 255, 255, 128)\n end",
"def reset!\n @color = @@colors[:white]\n end",
"def clear_screen\n CSI + '2J'\n end",
"def disable_log_to_screen\n @log_to_screen = false\n end",
"def partial_reset_terminal\n # Reset the terminal to a useable state (undo all changes).\n # '\\e[?7h': Re-enable line wrapping.\n # '\\e[?25h': Unhide the cursor.\n # '\\e[2J': Clear the terminal.\n # '\\e[;r': Set the scroll region to its default value.\n # Also sets cursor to (0,0).\n # '\\e[?1049l: Restore main screen buffer.\n print \"\\e[?7h\\e[?25h\\e[;r\\e[?1049l\"\n\n # Show user input.\n system 'stty echo'\nend",
"def clear_terminal\n system(\"cls\") || system(\"clear\")\nend",
"def clean_terminal\r\n Gem.win_platform? ? (system \"cls\") : (system \"clear\")\r\n end",
"def clear_screen\n puts \"\\e[H\\e[2J\"\n end",
"def clear_screen\n print \"\\e[2J\"\nend",
"def std_colors\n FFI::NCurses.use_default_colors\n # 2018-03-17 - changing it to ncurses defaults\n FFI::NCurses.init_pair(0, FFI::NCurses::BLACK, -1)\n FFI::NCurses.init_pair(1, FFI::NCurses::RED, -1)\n FFI::NCurses.init_pair(2, FFI::NCurses::GREEN, -1)\n FFI::NCurses.init_pair(3, FFI::NCurses::YELLOW, -1)\n FFI::NCurses.init_pair(4, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(5, FFI::NCurses::MAGENTA, -1)\n FFI::NCurses.init_pair(6, FFI::NCurses::CYAN, -1)\n FFI::NCurses.init_pair(7, FFI::NCurses::WHITE, -1)\n # ideally the rest should be done by application\n #FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, -1)\n #FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n\n FFI::NCurses.init_pair(14, FFI::NCurses::WHITE, FFI::NCurses::CYAN)\n=begin\n FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::GREEN)\n FFI::NCurses.init_pair(11, FFI::NCurses::BLACK, FFI::NCurses::YELLOW)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n FFI::NCurses.init_pair(14, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(15, FFI::NCurses::BLACK, FFI::NCurses::WHITE)\n=end\n end",
"def strip_color(text)\n text.to_s.gsub(/(\\001)?\\e\\[.*?(\\d)+m(\\002)?/ , '')\n end",
"def clear_terminal\n RUBY_PLATFORM =~ /win32|win64|\\.NET|windows|cygwin|mingw32/i ? system('cls') : system('clear')\n end",
"def reset_screen\n print \"\\e[2J\\e[H\"\nend",
"def clear_screen\n print \"\\e[2J\\e[f\"\nend",
"def clear_screen\n \"\\e[2J\\e[H\"\nend",
"def clear_screen\n print \"\\e[2J\"\nend",
"def clear_screen\n print \"\\e[2J\"\nend",
"def reset!\n # Attributes affected by mode changes\n @gutter_attr = AnsiTerm::Attr.new(flags: 0, bgcol: [\n 48,2,*(adjust_color(background_color, 0.8) || GUTTER)])\n \n @cursor_attr = AnsiTerm::Attr.new(\n bgcol: [48,2,*hexcol(get_style_option(\"cursor\", :bg, default: \"#802080\"))],\n fgcol: [38,2,*hexcol(get_style_option(\"cursor\", :fg, default: \"#ffffff\"))],\n flags: nil\n )\n\n @moderender.mode = @editor.mode\n @moderender.buffer = @editor.buffer\n @moderender.reset!\n end",
"def strip_colors\n self.class.new self.raw.gsub(/\\x03(?:[019]?[0-9](?:,[019]?[0-9])?)?/, \"\")\n end",
"def setup_terminal\n # Setup the terminal for the TUI.\n # '\\e[?1049h': Use alternative screen buffer. smcup\n # '\\e[?7l': Disable line wrapping.\n # '\\e[?25l': Hide the cursor.\n # '\\e[2J': Clear the screen.\n # '\\e[1;Nr': Limit scrolling to scrolling area.\n # Also sets cursor to (0,0).\n # printf(\"\\e[?1049h\\e[?7l\\e[?25l\\e[2J\\e[1;%sr\", @glines)\n # 2019-03-29 - XXX temporarily not hiding cursor to see if we can place it.\n printf(\"\\e[?1049h\\e[?7l\\e[?25h\\e[2J\\e[1;%sr\", @glines)\n # earlier glines was grows\n\n # Hide echoing of user input\n system 'stty -echo'\nend",
"def clear_terminal\n Gem.win_platform? ? (system 'cls') : (system 'clear')\nend",
"def before_exit\n system 'tput reset; stty icanon; stty echo; clear'\n end",
"def strip_color_codes(text); end",
"def cyan; if @options[:colors]; \"\\e[1;36m\" else \"\" end end",
"def turn_off_readline\n @readline_supported = false\n self\n end",
"def raw_no_echo_mode\n end",
"def reset_color_scheme\n self.color_scheme = nil\n end",
"def color?\n tty?\n end",
"def color(line)\n if STDOUT.isatty and ENV['TERM'].to_s.include? 'color'\n puts \"\\n\\u001b[35;1m#{line}\\u001b[0m\"\n else\n puts \"\\n\" + line\n end\nend",
"def strip_ansi(str)\n str.gsub(ANSI, '')\n end",
"def yellow\n colorize \"\\033[33m\"\n end",
"def rouge_gras\n \"\\033[1;31m#{self}\\033[0m\"\n end",
"def disable_stdout\n @old_stdout = STDOUT.dup\n # via Tomas Matousek, http://www.ruby-forum.com/topic/205887\n STDOUT.reopen(::RUBY_PLATFORM =~ /djgpp|(cyg|ms|bcc)win|mingw/ ? 'NUL' : '/dev/null')\n end",
"def clear_screen\n\tputs \"\\e[2J\" \n print \"\\e[H\"\nend",
"def clear\n Vedeu::Terminal.clear\n end",
"def disable_stdout\n @old_stdout = STDOUT.dup\n STDOUT.reopen(PLATFORM =~ /mswin/ ? \"NUL\" : \"/dev/null\")\n end",
"def hide\n ConsoleGlitter.escape(\"?25l\")\n end",
"def color_themes # :nologin:\n end",
"def activate_256_colors\n require 'ruco/file_store'\n (\n # not windows\n RbConfig::CONFIG['host_os'] !~ /mswin|mingw/ and\n\n # possible to open xterm-256color\n ['xterm', 'xterm-256color'].include?(ENV['TERM']) and\n Ruco::FileStore.new('~/.ruco/cache').cache('color_possible'){\n system(%{TERM=xterm-256color ruby -r curses -e 'Curses.noecho' > /dev/null 2>&1})\n }\n\n # finally switch terminal, so curses knows we want colors\n ) and ENV['TERM'] = 'xterm-256color'\nend",
"def reset_screen_clearing\n @clear_screen = false\n @clear_screen_code = \"\\e[H\\e[2J\"\n end",
"def print_colorized_if_tty(text, color=nil)\n CLIColorize.print_colorized_if_tty(text, color)\n end",
"def silence!\n IO.console.raw!\n end",
"def bleu_gras\n \"\\033[1;96m#{self}\\033[0m\"\n end",
"def clear\n puts \"\\n\" # pour certaines méthodes\n puts \"\\033c\"\nend",
"def initialize\n FFI::NCurses.initscr\n #FFI::NCurses.start_color\n FFI::NCurses.curs_set(0)\n FFI::NCurses.nodelay(FFI::NCurses.stdscr, true)\n FFI::NCurses.cbreak\n FFI::NCurses.raw\n FFI::NCurses.noecho\n FFI::NCurses.start_color\n FFI::NCurses.clear\n end",
"def clearScreen\n puts \"\\e[H\\e[2J\"\nend",
"def raw_no_echo_mode; end",
"def raw_no_echo_mode; end",
"def uncolorize\n @uncolorized || self\n end",
"def rouge\n \"\\033[0;91m#{self}\\033[0m\"\n end",
"def print_clear; print \"\\e[0J\" end",
"def usecolor\n return @usecolor if defined? @usecolor\n if @cmdobj.settings.has_key? 'nocolor' then @usecolor = false\n elsif @cmdobj.settings.has_key? 'mailto' then @usecolor = false\n elsif not @colorok then @usecolor = false\n else @usecolor = $stdout.tty?\n end\n #debug \"Use color? #{@usecolor}\"\n return @usecolor\n end",
"def uncolor(string)\n Style.uncolor(string)\n end",
"def clear_screen\r\n RUBY_PLATFORM =~ /cygwin|mswin|mingw|bccwin|wince|emx/ ? system(\"cls\") : system(\"clear\")\r\n end",
"def raw_mode!\n Vedeu.log(\"Terminal switching to 'raw' mode\")\n\n @_mode = :raw\n end",
"def clear_screen\n output(Esc.string('clear'))\n end",
"def bleu\n \"\\033[0;96m#{self}\\033[0m\"\n # 96=bleu clair, 93 = jaune, 94/95=mauve, 92=vert\n end"
] | [
"0.74163955",
"0.72037435",
"0.70543873",
"0.6912874",
"0.6859117",
"0.6782006",
"0.66943157",
"0.66922075",
"0.66635287",
"0.66455126",
"0.66303605",
"0.6624913",
"0.66145134",
"0.6604859",
"0.65819734",
"0.65725183",
"0.6564395",
"0.6548926",
"0.6541174",
"0.65066266",
"0.6462101",
"0.64490145",
"0.6427462",
"0.6385657",
"0.6348339",
"0.6324514",
"0.6312987",
"0.63081336",
"0.6242296",
"0.62292653",
"0.6194045",
"0.61689013",
"0.6167841",
"0.6150255",
"0.6131017",
"0.61229837",
"0.6052781",
"0.60297173",
"0.59867465",
"0.5985076",
"0.5964352",
"0.5963114",
"0.59535205",
"0.5951445",
"0.5920032",
"0.59105706",
"0.5901803",
"0.5896656",
"0.5886421",
"0.5880398",
"0.58661395",
"0.58641464",
"0.5852656",
"0.5847005",
"0.5842076",
"0.5836681",
"0.58363056",
"0.5836229",
"0.5827607",
"0.5813292",
"0.5813292",
"0.58081514",
"0.5803808",
"0.5801994",
"0.5796872",
"0.57691616",
"0.57684857",
"0.5764551",
"0.57599264",
"0.5759536",
"0.575889",
"0.5741863",
"0.57332575",
"0.5731726",
"0.5703168",
"0.5661264",
"0.5659086",
"0.5645892",
"0.5635978",
"0.5633259",
"0.5632597",
"0.5628104",
"0.5613083",
"0.56118006",
"0.5610416",
"0.5606952",
"0.56053823",
"0.55999184",
"0.5593549",
"0.55926037",
"0.5556643",
"0.5556643",
"0.55428034",
"0.5536318",
"0.55209255",
"0.55032516",
"0.5482258",
"0.54811317",
"0.5437344",
"0.5431937",
"0.5431531"
] | 0.0 | -1 |
Detect terminal color support | def supports?
return false unless $stdout.tty?
return false if ENV['TERM'] == 'dumb'
if ENV['TERM'] =~ /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i
return true
end
return true if ENV.include?('COLORTERM')
true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def detect_colour_mode\n case ENV['TERM']\n when /-truecolor$/ then 16_777_216\n when /-256color$/, 'xterm' then 256\n when /-color$/, 'rxvt' then 16\n else 256\n end\n end",
"def detect_colour_mode\n case ENV['TERM']\n when /-truecolor$/ then 16_777_216\n when /-256color$/, 'xterm' then 256\n when /-color$/, 'rxvt' then 16\n else 256\n end\n end",
"def color?\n tty?\n end",
"def detect_mode\n if ENV['NO_COLOR'] # see https://no-color.org/\n 0\n elsif RbConfig::CONFIG['host_os'] =~ /mswin|mingw/ # windows\n if ENV['ANSICON']\n 16\n elsif ENV['ConEmuANSI'] == 'ON'\n TRUE_COLOR\n else\n 0\n end\n elsif ENV['TERM_PROGRAM'] == 'Apple_Terminal'\n 256\n else\n case ENV['TERM']\n when /^rxvt-(?:.*)-256color$/\n 256\n when /-color$/, /^rxvt/\n 16\n else # optimistic default\n TRUE_COLOR\n end\n end\n end",
"def color?\n Chef::Config[:color] && stdout.tty?\n end",
"def is_color_printing_supported\n return @is_color_printing_supported\n end",
"def color?\n if @options[:color].nil?\n @options[:outdev] ? @options[:outdev].tty? : true\n else\n @options[:color]\n end\n end",
"def color?\n $options[:color].nil? ? @outdev.tty? : $options[:color]\n end",
"def color?\n ##Chef::Config[:color] && stdout.tty? && !Chef::Platform.windows?\n :red\n end",
"def color_support?\n !(RbConfig::CONFIG['host_os'] =~ /mswin|mingw/) || ENV['ANSICON']\n end",
"def color_enabled?\n if @color_enabled.nil?\n if RbConfig::CONFIG['target_os'] =~ /mswin|mingw/i\n if ENV['ANSICON']\n @color_enabled = true\n else\n begin\n require 'rubygems' unless ENV['NO_RUBYGEMS']\n require 'Win32/Console/ANSI'\n @color_enabled = true\n rescue LoadError\n @color_enabled = false\n info \"You must 'gem install win32console' to use color on Windows\"\n end\n end\n else\n @color_enabled = true\n end\n end\n\n @color_enabled\n end",
"def color?\n if color == :auto\n return $stdout.tty?\n end\n\n # ensure true on misconfigured value\n return color != :never\n end",
"def from_tput\n return NoValue if !TTY::Color.command?(\"tput colors\")\n\n cmd = %q(tput colors 2>/dev/null)\n `#{cmd}`.to_i > 2\n rescue Errno::ENOENT\n NoValue\n end",
"def is_color_printing_supported=(value)\n @is_color_printing_supported = value\n end",
"def color_enabled?\n # if not set, use tty to check\n return $stdout.tty? unless instance_variable_defined?(:@disable_color)\n\n !@disable_color\n end",
"def activate_256_colors\n require 'ruco/file_store'\n (\n # not windows\n RbConfig::CONFIG['host_os'] !~ /mswin|mingw/ and\n\n # possible to open xterm-256color\n ['xterm', 'xterm-256color'].include?(ENV['TERM']) and\n Ruco::FileStore.new('~/.ruco/cache').cache('color_possible'){\n system(%{TERM=xterm-256color ruby -r curses -e 'Curses.noecho' > /dev/null 2>&1})\n }\n\n # finally switch terminal, so curses knows we want colors\n ) and ENV['TERM'] = 'xterm-256color'\nend",
"def scan_for_colors; end",
"def in_color(text, color)\n # TODO: Disable if the output is not a terminal.\n return text unless enabled?\n # Source of color codes:\n # https://misc.flogisoft.com/bash/tip_colors_and_formatting\n code =\n case color\n when :red then 31\n when :green then 32\n when :yellow then 33\n when :blue then 34\n when :magenta then 35\n when :cyan then 36\n when :light_gray then 37\n when :dark_gray then 90\n when :light_red then 91\n when :light_green then 92\n when :light_yellow then 93\n when :light_blue then 94\n when :light_magenta then 95\n when :light_cyan then 96\n when :white then 97\n when :default_background then 49\n when :black_background then 40\n when :red_background then 41\n when :green_background then 42\n when :yellow_background then 43\n when :blue_background then 44\n when :magenta_background then 45\n when :cyan_background then 46\n when :light_gray_background then 47\n when :dark_gray_background then 100\n when :light_red_background then 101\n when :light_green_background then 102\n when :light_yellow_background then 103\n when :light_blue_background then 104\n when :light_magenta_background then 105\n when :light_cyan_background then 106\n when :white_background then 107\n else raise \"Unrecognized color: #{color}\"\n end\n \"\\e[#{code}m#{text}\\e[0m\"\n end",
"def use_color?\n use_color\n end",
"def color(line)\n if STDOUT.isatty and ENV['TERM'].to_s.include? 'color'\n puts \"\\n\\u001b[35;1m#{line}\\u001b[0m\"\n else\n puts \"\\n\" + line\n end\nend",
"def std_colors\n FFI::NCurses.use_default_colors\n # 2018-03-17 - changing it to ncurses defaults\n FFI::NCurses.init_pair(0, FFI::NCurses::BLACK, -1)\n FFI::NCurses.init_pair(1, FFI::NCurses::RED, -1)\n FFI::NCurses.init_pair(2, FFI::NCurses::GREEN, -1)\n FFI::NCurses.init_pair(3, FFI::NCurses::YELLOW, -1)\n FFI::NCurses.init_pair(4, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(5, FFI::NCurses::MAGENTA, -1)\n FFI::NCurses.init_pair(6, FFI::NCurses::CYAN, -1)\n FFI::NCurses.init_pair(7, FFI::NCurses::WHITE, -1)\n # ideally the rest should be done by application\n #FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, -1)\n #FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n\n FFI::NCurses.init_pair(14, FFI::NCurses::WHITE, FFI::NCurses::CYAN)\n=begin\n FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::GREEN)\n FFI::NCurses.init_pair(11, FFI::NCurses::BLACK, FFI::NCurses::YELLOW)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n FFI::NCurses.init_pair(14, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(15, FFI::NCurses::BLACK, FFI::NCurses::WHITE)\n=end\n end",
"def scan_for_colors\n scan(/\\033\\[([0-9;]+)m(.+?)\\033\\[0m|([^\\033]+)/m).map do |match|\n split_colors(match)\n end\n end",
"def scan_for_colors\n scan(/\\033\\[([0-9;]+)m(.+?)\\033\\[0m|([^\\033]+)/m).map do |match|\n split_colors(match)\n end\n end",
"def supports_rgb_color?\n true\n end",
"def usecolor\n return @usecolor if defined? @usecolor\n if @cmdobj.settings.has_key? 'nocolor' then @usecolor = false\n elsif @cmdobj.settings.has_key? 'mailto' then @usecolor = false\n elsif not @colorok then @usecolor = false\n else @usecolor = $stdout.tty?\n end\n #debug \"Use color? #{@usecolor}\"\n return @usecolor\n end",
"def colored?\n cp2pkh? || cp2sh?\n end",
"def using_color_scheme?\n true if @color_scheme\n end",
"def start_color\n \"\\033[\"\n end",
"def colored_prompt\n return (Readline::VERSION !~ /EditLine/) && Pry.color if @colored_prompt.nil?\n\n @colored_prompt\n end",
"def check_color\n state = read_state\n return state[:color] != COLOR_OFF\n end",
"def contains_color?\n self[COLOR_REGEXP]\n end",
"def print_colorized_if_tty(text, color=nil)\n CLIColorize.print_colorized_if_tty(text, color)\n end",
"def ansi? ; @ansi ; end",
"def system_color\n return Color.new(255,255,0)\n end",
"def color(color_code)\n colors = color_code.scan(/\\d+/)\n\n # Extended set foreground x-term color\n if colors[0] == \"38\" && colors[1] == \"5\"\n return @fg_color = \"term-fgx#{colors[2]}\"\n end\n\n # Extended set background x-term color\n if colors[0] == \"48\" && colors[1] == \"5\"\n return @bg_color = \"term-bgx#{colors[2]}\"\n end\n\n # If multiple colors are defined, i.e. \\e[30;42m\\e then loop through each\n # one, and assign it to @fg_color or @bg_color\n colors.each do |cc|\n c_integer = cc.to_i\n\n # Reset all styles\n if c_integer == 0\n @fg_color = nil\n @bg_color = nil\n @other_colors = []\n\n # Primary (default) font\n elsif c_integer == 10\n # no-op\n\n # Turn off bold / Normal color or intensity (21 & 22 essentially do the same thing)\n elsif c_integer == 21 || c_integer == 22\n @other_colors.delete(\"term-fg1\")\n @other_colors.delete(\"term-fg2\")\n\n # Turn off italic\n elsif c_integer == 23\n @other_colors.delete(\"term-fg3\")\n\n # Turn off underline\n elsif c_integer == 24\n @other_colors.delete(\"term-fg4\")\n\n # Turn off crossed-out\n elsif c_integer == 29\n @other_colors.delete(\"term-fg9\")\n\n # Reset foreground color only\n elsif c_integer == 39\n @fg_color = nil\n\n # Reset background color only\n elsif c_integer == 49\n @bg_color = nil\n\n # 30–37, then it's a foreground color\n elsif c_integer >= 30 && c_integer <= 37\n @fg_color = \"term-fg#{cc}\"\n\n # 40–47, then it's a background color.\n elsif c_integer >= 40 && c_integer <= 47\n @bg_color = \"term-bg#{cc}\"\n\n # 90-97 is like the regular fg color, but high intensity\n elsif c_integer >= 90 && c_integer <= 97\n @fg_color = \"term-fgi#{cc}\"\n\n # 100-107 is like the regular bg color, but high intensity\n elsif c_integer >= 100 && c_integer <= 107\n @fg_color = \"term-bgi#{cc}\"\n\n # 1-9 random other styles\n elsif c_integer >= 1 && c_integer <= 9\n @other_colors << \"term-fg#{cc}\"\n end\n end\n end",
"def system_color\n return Color.new(192, 224, 255)\n end",
"def color?\n false\n end",
"def color?\n @color\n end",
"def rgb?\n colorspace == \"rgb\"\n end",
"def colorNormal\n puts \"\\033[0m\"\n end",
"def color(color, str)\n raise \"[BUG] Unrecognized color #{color}\" unless COLORS[color]\n\n # Almost any real Unix terminal will support color,\n # so we just filter for Windows terms (which don't set TERM)\n # and not-real terminals, which aren't ttys.\n return str if ENV[\"TERM\"].nil? || ENV[\"TERM\"].empty? || !STDOUT.tty?\n return \"\\e[#{COLORS[color]}m#{str}\\e[0m\"\n end",
"def red\n colorize \"\\033[31m\"\n end",
"def red?\n not black?\n end",
"def hsla_color; end",
"def color_valid?(_clr)\n true\n end",
"def color_valid?(_clr)\n true\n end",
"def possible_colors\n %w(R G B Y)\n end",
"def color_modes\n return @color_modes\n end",
"def test_does_not_colorize_when_tty_method_not_defined_and_SSHKIT_COLOR_not_present\n color = SSHKit::Color.new(stub(), {})\n assert_equal 'hi', color.colorize('hi', :red)\n end",
"def open?\n @color == @@colors[:cyan]\n end",
"def colors?\n scan(/%([a-z]+)/).each do\n return true if Regexp.last_match(1).validate_color\n end\n false\n end",
"def color?\n !@color.nil?\n end",
"def bash_color_codes(string)\n string.gsub(\"\\e[0m\", '</span>').\n gsub(\"\\e[31m\", '<span class=\"color31\">').\n gsub(\"\\e[32m\", '<span class=\"color32\">').\n gsub(\"\\e[33m\", '<span class=\"color33\">').\n gsub(\"\\e[34m\", '<span class=\"color34\">').\n gsub(\"\\e[35m\", '<span class=\"color35\">').\n gsub(\"\\e[36m\", '<span class=\"color36\">').\n gsub(\"\\e[37m\", '<span class=\"color37\">')\n end",
"def colorize *args\n $terminal.color(*args)\nend",
"def kiosk_mode_require_color_inversion\n return @kiosk_mode_require_color_inversion\n end",
"def is_valid_color?(color_string)\n COLORS.include?(color_string)\n end",
"def cyan; if @options[:colors]; \"\\e[1;36m\" else \"\" end end",
"def get_color_code\n\t\t{ r: @term_hex[0], g: @term_hex[1], b: @term_hex[2], alpha: @term_hex[-1] }\n\tend",
"def windows_ansi?\n defined?(Win32::Console) || ENV['ANSICON']\n end",
"def list_colors\n color_string = \"\\nThe available colors are \"\n COLORS.each_with_index do |color, index|\n if index%2 == 0\n color_string += \"\\n\"\n end\n color_string += color + \" \"\n end\n puts color_string\n end",
"def output_color(text, color=text.to_i)\r\n # Color matches: 1 - Black; 2 - White; 3 - Red; 4 - Yellow; 5 - Green; 6 - Blue; 7 - Gold\r\n colors = { 1 => 30, 2 => 36, 3 => 31, 4 => 33, 5 => 35, 6 => 34, 7 => 220 }\r\n # \\e[47m Is for the grey foreground \\e[{color} is for picking the color and \\e[0m is for resetting the terminal.\r\n \"\\e[1m\\e[47m\\e[#{colors[color]}m#{text}\\e[0m\\e[22m\"\r\n end",
"def windows_ansi?\n defined?(Win32::Console) || ENV['ANSICON'] || (windows? && mri_2?)\n end",
"def colors\n color_codes.keys\n end",
"def hex?(color_str)\n !!(color_str.to_s.downcase.strip =~ HEX_REGEX)\n end",
"def colorize(*args)\n shell.set_color(*args)\n end",
"def isColor(c)\n if c == \"r\" or c == \"b\" or c == \"g\" or c == \"y\" or c == \"c\" or c == \"m\" then\n return true\n else\n return false\n end\nend",
"def cmyk?\n colorspace == \"cmyk\"\n end",
"def determine_color_scheme\n @default_options.color_scheme = @highline.choose do |menu|\n menu.layout = :one_line\n menu.select_by = :name\n menu.header = nil\n menu.prompt = \"What color scheme would you like? \"\n menu.choice(\"none\") { :none }\n menu.choice(\"dark terminal background\") { :dark_bg }\n menu.choice(\"light terminal background\") { :light_bg }\n end\n end",
"def get_color(key)\n if key.is_a? String\n color = key\n elsif Wirb::COLORS.key?(key)\n color = Wirb::COLORS[key]\n end\n\n color ? \"\\033[#{ color }m\" : ''\n end",
"def auxiliary_colour\n @cr[0xe] >> 4\n end",
"def black?\n @color == :black\n end",
"def print_colors\n 1.upto(6) { |i| print \"#{i} = \" + \"\\u2b24\".color(COLORS[i]) + \" \" }\n print \": \"\nend",
"def yellow\n colorize \"\\033[33m\"\n end",
"def colorized?; end",
"def has_colors?(data)\n data.match(/\\x1B\\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]/)\n end",
"def rgb\n if Configuration.colour_mode == 16777216\n sprintf(\"\\e[38;2;%s;%s;%sm\", *css_to_rgb)\n\n else\n numbered\n\n end\n end",
"def fancy_color_methods\n self.term_colorizer_methods\n end",
"def parse_colors(s)\n\n line = \"\"\n\n s.each_char do |c|\n line.concat(@colors[c]) if @colors.has_key?(c)\n line.concat(\" \")\n end\n\n line.concat(\"\\033[0m\")\n end",
"def hexadecimal?\n # Both standard and shorthand (CSS) style hexadecimal color value.\n not cterm? and /\\A#?(?:[0-9a-f]{3}|[0-9a-f]{6})\\z/io.match(@value.to_s)\n end",
"def green\n colorize(32)\n end",
"def red?\n @color == :red\n end",
"def color_channels\n color == 2 ? 3 : 4\n end",
"def context_get_fgcolor()\n return $gimp_iface.gimp_context_get_foreground()[0]\nend",
"def from_curses(curses_class = nil)\n return NoValue if TTY::Color.windows?\n\n require 'curses'\n\n if defined?(Curses)\n curses_class ||= Curses\n curses_class.init_screen\n has_color = curses_class.has_colors?\n curses_class.close_screen\n return has_color\n end\n NoValue\n rescue LoadError\n warn 'no native curses support' if @verbose\n NoValue\n end",
"def in_check?(color)\n end",
"def reset_colors\n @color_output ||= true\n\n # Build the default colors\n Term::ANSIColorHI.coloring = color_output\n c = Term::ANSIColorHI\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n @screen_clear = \"\\e[H\\e[2J\"\n end",
"def color_codes\n {\n :black => 0, :light_black => 60,\n :red => 1, :light_red => 61,\n :green => 2, :light_green => 62,\n :yellow => 3, :light_yellow => 63,\n :blue => 4, :light_blue => 64,\n :magenta => 5, :light_magenta => 65,\n :cyan => 6, :light_cyan => 66,\n :white => 7, :light_white => 67,\n :default => 9\n }\n end",
"def colorize(params)\n return self unless STDOUT.isatty\n\n begin\n require \"Win32/Console/ANSI\" if RUBY_PLATFORM.match?(/win32/)\n rescue LoadError\n raise \"You must gem install win32console to use colorize on Windows\"\n end\n\n color_parameters = {}\n\n if params.instance_of?(Hash)\n color_parameters[:color] = COLORS[params[:color]]\n color_parameters[:background] = COLORS[params[:background]]\n color_parameters[:mode] = MODES[params[:mode]]\n elsif params.instance_of?(Symbol)\n color_parameters[:color] = COLORS[params]\n end\n\n color_parameters[:color] ||= @color ||= COLORS[:default]\n color_parameters[:background] ||= @background ||= COLORS[:default]\n color_parameters[:mode] ||= @mode ||= MODES[:default]\n\n color_parameters[:uncolorized] ||= @uncolorized ||= dup\n\n # calculate bright mode\n color_parameters[:color] += 50 if color_parameters[:color] > 10\n\n color_parameters[:background] += 50 if color_parameters[:background] > 10\n\n \"\\033[#{color_parameters[:mode]};#{color_parameters[:color] + 30};\"\\\n \"#{color_parameters[:background] + 40}m#{color_parameters[:uncolorized]}\\033[0m\"\\\n .color_parameters(color_parameters)\n end",
"def on_49(_) { fg: fg_color(9) } end",
"def red(input)\n puts \"\\e[31m#{input}\\e[0m\"\nend",
"def red(input)\n puts \"\\e[31m#{input}\\e[0m\"\nend",
"def nc\n Ncurses::COLOR_PAIR(@id)\n end",
"def display_colors\n\t\tprint \"\\nColors: \"\n\t\tMastermind::COLORS.each do |color, _color_code|\n\t\t\tunless color == :blank || color == :black || color == :white\n\t\t\t\tcolor_string = color.to_s.capitalize\n\t\t\t\tprint Mastermind::color(\" #{color_string} \", color)\n\t\t\tend\n\t\tend\n\t\tputs \"\\nChoose a color with it's full name or it's first character\"\n\tend",
"def reset_colors\n @color_output = false\n\n #Term::ANSIColor.coloring = true\n c = Term::ANSIColor\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n end",
"def color(color=32)\n printf \"\\033[#{color}m\"\n yield\n printf \"\\033[0m\"\nend",
"def hex?(color_string)\n # MRuby doesn't support regex, otherwise we'd do:\n # !(/^#[0-9A-F]{6}$/i.match(a).nil?)\n color_string.instance_of?(String) &&\n color_string[0] == '#' &&\n color_string.length == 7\n end",
"def red\n colorize(31)\n end",
"def colored(s)\n\tif $stdout.tty?\n\t\t\"\\e[0;36;49m#{s}\\e[0m\"\n\telse\n\t\t\"[#{s}]\"\n\tend\nend",
"def rgb(red, green, blue, background = false)\n case @mode\n when 8\n \"#{background ? 4 : 3}#{rgb_to_ansi(red, green, blue, false)}\"\n when 16\n \"#{background ? 4 : 3}#{rgb_to_ansi(red, green, blue, true)}\"\n when 256\n \"#{background ? 48 : 38}#{rgb_to_256(red, green, blue)}\"\n when TRUE_COLOR\n \"#{background ? 48 : 38}#{rgb_true(red, green, blue)}\"\n end\n end",
"def color(color_code, str)\n tty? ? str : \"\\033[#{color_code}m#{str}\\033[0m\"\n end"
] | [
"0.8104025",
"0.8104025",
"0.79577106",
"0.79177874",
"0.7411239",
"0.7368447",
"0.7349984",
"0.7342547",
"0.73409164",
"0.72787374",
"0.7227641",
"0.7131074",
"0.7084524",
"0.7057505",
"0.70459276",
"0.7007803",
"0.6982087",
"0.69002515",
"0.68771774",
"0.68199635",
"0.6765277",
"0.6733502",
"0.6733502",
"0.67063904",
"0.65963453",
"0.65639985",
"0.65221",
"0.64972",
"0.6468892",
"0.64499193",
"0.6401956",
"0.6319131",
"0.6310135",
"0.6304732",
"0.6286413",
"0.62674826",
"0.62331146",
"0.6227437",
"0.62085426",
"0.6183498",
"0.6154612",
"0.61471325",
"0.61400753",
"0.61302215",
"0.61282855",
"0.61282855",
"0.61274195",
"0.61272365",
"0.61033523",
"0.60943747",
"0.6092254",
"0.6091321",
"0.60745585",
"0.6069911",
"0.6054823",
"0.6047971",
"0.6042127",
"0.6031378",
"0.6022903",
"0.6006389",
"0.60059136",
"0.5998407",
"0.5996612",
"0.5989574",
"0.5957398",
"0.5941321",
"0.5928603",
"0.59272295",
"0.59259564",
"0.5909686",
"0.59072196",
"0.59043175",
"0.5893725",
"0.58934164",
"0.5883587",
"0.58730686",
"0.58576703",
"0.5851373",
"0.584799",
"0.5836847",
"0.58317405",
"0.5830703",
"0.58292425",
"0.5816572",
"0.5800395",
"0.5795584",
"0.57850647",
"0.57834125",
"0.5781117",
"0.57694507",
"0.57694507",
"0.5769121",
"0.5768964",
"0.5767732",
"0.57624644",
"0.5757357",
"0.57470775",
"0.5746284",
"0.574483",
"0.5733398"
] | 0.66991824 | 24 |
Apply ANSI color to the given string. | def decorate(string, *colors)
return string if string.empty? || !enabled
validate(*colors)
ansi_colors = colors.map { |color| lookup(color) }
ansi_string = "#{ansi_colors.join}#{string}#{ANSI::CLEAR}"
if ansi_string =~ /(#{Regexp.quote(ANSI::CLEAR)}){2,}/
ansi_string.gsub!(/(#{Regexp.quote(ANSI::CLEAR)}){2,}/, '')
end
matches = ansi_string.scan(/#{Regexp.quote(ANSI::CLEAR)}/)
if matches.length >= 2
ansi_string.sub!(/#{Regexp.quote(ANSI::CLEAR)}/, ansi_colors.join)
end
ansi_string
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ansi_color(color, text)\n #\"\\x1b[38;5;#{color}m#{text}\\x1b[0m\"\n \"\\e[38;5;#{color}m#{text}\\e[0m\"\nend",
"def colorize(str, color_code = 36)\n \"\\e[#{color_code}m#{str}\\e[0m\"\n end",
"def colorize(string, code)\n \"\\e[#{code}m#{string}\\e[0m\"\n end",
"def ansi(string, *codes)\n return string unless $ansi\n s = \"\"\n codes.each do |code|\n s << \"\\e[#{TABLE[code]}m\"\n end\n s << string\n s << CLEAR\n end",
"def bbcode_to_ansi(string, usecolors = T.unsafe(nil)); end",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def red(string)\n \"\\033[0;33m#{string}\\033[0m\"\nend",
"def red(string)\n \"\\033[0;33m#{string}\\033[0m\"\nend",
"def colorize(text, color_code); \"#{color_code}#{text}\\033[0m\"; end",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def colorize(color, text)\n \"\\e[#{color}m#{text}\\e[0m\"\n end",
"def colorize(str, foreground)\n if @color\n fore = FOREGROUND[foreground] or raise ArgumentError, \"Unknown foreground color #{foreground.inspect}\"\n \"#{fore}#{str}#{RESET}\"\n else\n str.to_s\n end\n end",
"def colorize(text, color)\n\t\"\\e[#{Colors[color]}m#{text}\\e[0m\"\nend",
"def puts_red(str)\n puts \" \\e[00;31m#{str}\\e[00m\"\nend",
"def colorize(text, color_code); \"\\e[#{color_code}m#{text}\\e[0m\"; end",
"def ansi(param)\n i = case param\n when Symbol, String\n ColorfulDays.to_ansi_color_index(param.to_sym)\n when Integer\n param\n else\n raise \"unknown parameter: #{param}\"\n end\n\n color_256(i)\n end",
"def green(string)\n \"\\033[0;32m#{string}\\e[0m\"\nend",
"def green(string)\n \"\\033[0;32m#{string}\\e[0m\"\nend",
"def colorize( *args )\n\tstring = ''\n\n\tif block_given?\n\t\tstring = yield\n\telse\n\t\tstring = args.shift\n\tend\n\n\tending = string[/(\\s)$/] || ''\n\tstring = string.rstrip\n\n\treturn ansi_code( args.flatten ) + string + ansi_code( 'reset' ) + ending\nend",
"def yellow(string)\n \"\\033[0;33m#{string}\\e[0m\"\nend",
"def yellow(string)\n \"\\033[0;33m#{string}\\e[0m\"\nend",
"def yellow(string)\n \"\\033[0;33m#{string}\\e[0m\"\nend",
"def yellow(string)\n \"\\033[0;33m#{string}\\e[0m\"\nend",
"def colorize(text, color_code)\n \"#{color_code}#{text}\\033[0m\"\n end",
"def red(string)\n colorize(string, 31)\n end",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def colorize( *args )\n\t\t\tstring = ''\n\n\t\t\tif block_given?\n\t\t\t\tstring = yield\n\t\t\telse\n\t\t\t\tstring = args.shift\n\t\t\tend\n\n\t\t\tending = string[/(\\s)$/] || ''\n\t\t\tstring = string.rstrip\n\n\t\t\treturn ansi_code( args.flatten ) + string + ansi_code( 'reset' ) + ending\n\t\tend",
"def colorize( *args )\n\t\t\tstring = ''\n\n\t\t\tif block_given?\n\t\t\t\tstring = yield\n\t\t\telse\n\t\t\t\tstring = args.shift\n\t\t\tend\n\n\t\t\tending = string[/(\\s)$/] || ''\n\t\t\tstring = string.rstrip\n\n\t\t\treturn ansi_code( args.flatten ) + string + ansi_code( 'reset' ) + ending\n\t\tend",
"def colorize( *args )\n\t\t\tstring = ''\n\n\t\t\tif block_given?\n\t\t\t\tstring = yield\n\t\t\telse\n\t\t\t\tstring = args.shift\n\t\t\tend\n\n\t\t\tending = string[/(\\s)$/] || ''\n\t\t\tstring = string.rstrip\n\n\t\t\treturn ansi_code( args.flatten ) + string + ansi_code( 'reset' ) + ending\n\t\tend",
"def colorize( *args )\n\t\t\tstring = ''\n\n\t\t\tif block_given?\n\t\t\t\tstring = yield\n\t\t\telse\n\t\t\t\tstring = args.shift\n\t\t\tend\n\n\t\t\tending = string[/(\\s)$/] || ''\n\t\t\tstring = string.rstrip\n\n\t\t\treturn ansi_code( args.flatten ) + string + ansi_code( 'reset' ) + ending\n\t\tend",
"def colorize( *args )\n\t\t\tstring = ''\n\n\t\t\tif block_given?\n\t\t\t\tstring = yield\n\t\t\telse\n\t\t\t\tstring = args.shift\n\t\t\tend\n\n\t\t\tending = string[/(\\s)$/] || ''\n\t\t\tstring = string.rstrip\n\n\t\t\treturn ansi_code( args.flatten ) + string + ansi_code( 'reset' ) + ending\n\t\tend",
"def scolora(str)\n str.to_s.\n gsub(/\\e\\[1;33m/,''). # colori 16\n gsub(/\\e\\[0m/,''). # colori 64k\n gsub(/\\e\\[38;\\d+;\\d+m/,'') # end color\nend",
"def colorize(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\nend",
"def green(string)\n \"\\033[0;32m#{string}\\033[0m\"\nend",
"def green(string)\n \"\\033[0;32m#{string}\\033[0m\"\nend",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def color(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\n end",
"def color(text)\n \"\\e[31m#{text}\\e[0m\"\n end",
"def html_to_ansi(string); end",
"def color(str, c)\n ENV['NO_COLOR'] ? str : \"\\033[#{c}m#{str}\\033[0m\"\n end",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\n end",
"def colorize(text, color_code)\n return \"\\e[#{color_code}m#{text}\\e[0m\"\n end",
"def style(s, style); color(s, *Styles[style]) end",
"def colorize(s, c = :green)\n %{\\e[#{c == :green ? 33 : 31}m#{s}\\e[0m}\n end",
"def color(color, str)\n raise \"[BUG] Unrecognized color #{color}\" unless COLORS[color]\n\n # Almost any real Unix terminal will support color,\n # so we just filter for Windows terms (which don't set TERM)\n # and not-real terminals, which aren't ttys.\n return str if ENV[\"TERM\"].nil? || ENV[\"TERM\"].empty? || !STDOUT.tty?\n return \"\\e[#{COLORS[color]}m#{str}\\e[0m\"\n end",
"def put_background_color_to(str, color)\n str.colorize(background: :\"#{color}\")\n end",
"def set_color(string, *colors)\n ansi_colors = colors.map { |color| lookup_color(color) }\n \"#{ansi_colors.join}#{string}#{CLEAR}\"\n end",
"def color(code, str)\n STDOUT.tty? ? \"\\033[#{code}m#{str}\\033[0m\" : str\n end",
"def red(text)\n colorize(text, 31)\nend",
"def decorate(string, style)\n # (fg, bg, bright, underline)\n # fg = style.fg\n # bg = style.bg\n # bright = style.bright\n # underline = style.underline\n return string if !STDOUT.isatty || @output_type == :raw || @disabled\n\n fg = get_colour_instance style.fg\n bg = get_colour_instance style.bg\n\n output = []\n lines = string.lines.map(&:chomp)\n lines = [''] if lines.length.zero?\n lines.each do |line|\n unless line.length < 0\n line = case @palette.type\n when 'ansi' then colour_ansi line, fg, bg\n when 'extended' then colour_extended line, fg, bg\n else raise \"Unknown palette '#{@palette.type}'.\"\n end\n\n line = e(1) + line if style.bright\n line = e(4) + line if style.underline\n line = e(5) + line if style.blink\n line = e(7) + line if style.inverse\n line = e(20) + line if style.fraktur\n line = e(51) + line if style.framed\n\n if (style.bright || style.underline || style.blink || style.inverse || style.fraktur || style.framed) && (fg == nil) && (bg == nil)\n line << e(0)\n end\n end\n\n output.push line\n end\n\n output << '' if string =~ /\\n$/\n output.join \"\\n\"\n end",
"def puts_blue(string)\n puts \"\\033[34m\" + string + \"\\033[0m\"\nend",
"def colorize(text, color=nil)\n CLIColorize.colorize(text, color)\n end",
"def strcolor(string, color=nil)\n return string if !@@options[:colors] || color.nil?\n\n case color\n when \"green\"\n \"\\033[22;32m#{string}\\x1b[0m\"\n when \"red\"\n \"\\033[22;31m#{string}\\x1b[0m\"\n when \"yellow\"\n \"\\033[01;33m#{string}\\x1b[0m\"\n else\n string\n end\n end",
"def yellow(string)\n colorize(string, 33)\n end",
"def output_color(text, color=text.to_i)\r\n # Color matches: 1 - Black; 2 - White; 3 - Red; 4 - Yellow; 5 - Green; 6 - Blue; 7 - Gold\r\n colors = { 1 => 30, 2 => 36, 3 => 31, 4 => 33, 5 => 35, 6 => 34, 7 => 220 }\r\n # \\e[47m Is for the grey foreground \\e[{color} is for picking the color and \\e[0m is for resetting the terminal.\r\n \"\\e[1m\\e[47m\\e[#{colors[color]}m#{text}\\e[0m\\e[22m\"\r\n end",
"def colorize(string, color_code)\n if !defined?(Win32::Console) && !!(RUBY_PLATFORM =~ /win32/ || RUBY_PLATFORM =~ /mingw32/)\n # looks like this person doesn't have Win32::Console and is on windows\n # just return the uncolorized string\n return string\n end\n \"#{CODES[color_code] || color_code}#{string}#{CODES[:reset]}\"\n end",
"def red(text)\n colorize text, \"\\033[1;31m\"\n end",
"def ansi_to_html(string); end",
"def bash_color_codes(string)\n string.gsub(\"\\e[0m\", '</span>').\n gsub(\"\\e[31m\", '<span class=\"color31\">').\n gsub(\"\\e[32m\", '<span class=\"color32\">').\n gsub(\"\\e[33m\", '<span class=\"color33\">').\n gsub(\"\\e[34m\", '<span class=\"color34\">').\n gsub(\"\\e[35m\", '<span class=\"color35\">').\n gsub(\"\\e[36m\", '<span class=\"color36\">').\n gsub(\"\\e[37m\", '<span class=\"color37\">')\n end",
"def colorize(text, color_code)\n \"\\e[0;#{color_code}m#{text}\"\nend",
"def rgb_to_ansi(red, green, blue, use_bright = false)\n color_pool = RGB_COLORS_ANSI.values\n color_pool += RGB_COLORS_ANSI_BRIGHT.values if use_bright\n\n ansi_color_rgb = color_pool.min_by{ |col| rgb_color_distance([red, green, blue],col) }\n if ansi_color = RGB_COLORS_ANSI.key(ansi_color_rgb)\n ANSI_COLORS[ansi_color]\n else\n ansi_color = RGB_COLORS_ANSI_BRIGHT.key(ansi_color_rgb)\n \"#{ANSI_COLORS[ansi_color]};1\"\n end\n end",
"def color(str, color)\n interactive? ? interaction_highline.color(str, color) : str\n end",
"def color(color_code, str)\n tty? ? str : \"\\033[#{color_code}m#{str}\\033[0m\"\n end",
"def green(text)\n colorize(text, 32)\nend",
"def add_bright_color(str, color)\n color = color.to_s.sub(\"bright_\", \"\").to_sym\n str = reset_prev_formatting str, :color\n \"\\e[1m\\e[#{TC_CONFIG[:colors][color].to_s}m#{str}\"\n end",
"def colorize(text, color_code)\n if windows?\n text\n else\n \"\\e[#{color_code}m#{text}\\e[0m\"\n end\n end",
"def green(string)\n colorize(string, 32)\n end",
"def colorize(text, color = :default, bg_color = :default)\n colors = {\n :default => \"38\",\n :black => \"30\",\n :red => \"31\",\n :green => \"32\",\n :brown => \"33\",\n :blue => \"34\",\n :purple => \"35\",\n :cyan => \"36\",\n :gray => \"37\",\n :dark_gray => \"1;30\",\n :light_red => \"1;31\",\n :light_green => \"1;32\",\n :yellow => \"1;33\",\n :light_blue => \"1;34\",\n :light_purple => \"1;35\",\n :light_cyan => \"1;36\",\n :white => \"1;37\"\n }\n\n bg_colors = {\n :default => \"0\",\n :black => \"40\",\n :red => \"41\",\n :green => \"42\",\n :brown => \"43\",\n :blue => \"44\",\n :purple => \"45\",\n :cyan => \"46\",\n :gray => \"47\",\n :dark_gray => \"100\",\n :light_red => \"101\",\n :light_green => \"102\",\n :yellow => \"103\",\n :light_blue => \"104\",\n :light_purple => \"105\",\n :light_cyan => \"106\",\n :white => \"107\"\n }\n\n color_code = colors[color]\n bg_color_code = bg_colors[bg_color]\n return \"\\033[#{bg_color_code};#{color_code}m#{text}\\033[0m\"\nend",
"def colorize(text = '_', color = 'default', bgcolor = 'default')\n colors = {\n 'default' => 38,\n 'black' => 30,\n 'red' => 31,\n 'green' => 32,\n 'brown' => 33,\n 'blue' => 34,\n 'purple' => 35,\n 'cyan' => 36,\n 'gray' => 37,\n 'dark gray' => '1;30',\n 'light red' => '1;31',\n 'light green' => '1;32',\n 'yellow' => '1;33',\n 'light blue' => '1;34',\n 'light purple' => '1;35',\n 'light cyan' => '1;36',\n 'white' => '1;37'\n }\n bgcolors = {\n 'default' => 0,\n 'black' => 40,\n 'red' => 41,\n 'green' => 42,\n 'brown' => 43,\n 'blue' => 44,\n 'purple' => 45,\n 'cyan' => 46,\n 'gray' => 47,\n 'dark gray' => 100,\n 'light red' => 101,\n 'light green' => 102,\n 'yellow' => 103,\n 'light blue' => 104,\n 'light purple' => 105,\n 'light cyan' => 106,\n 'white' => 107\n }\n\n color_code = colors[color]\n bgcolor_code = bgcolors[bgcolor]\n\n return \"\\033[#{bgcolor_code};#{color_code}m#{text}\\033[0m\"\nend",
"def colorize(*args)\n shell.set_color(*args)\n end",
"def red(msg)\n \"\\033[31m#{msg}\\033[39m\"\nend",
"def red(string)\n puts $color.wrap(\"#{string}\").red\nend",
"def colored(s)\n\tif $stdout.tty?\n\t\t\"\\e[0;36;49m#{s}\\e[0m\"\n\telse\n\t\t\"[#{s}]\"\n\tend\nend",
"def parse_ansi(str)\n require 'strscan'\n\n r = []\n f = []\n t = []\n\n a = nil\n s = StringScanner.new(str)\n\n while(!s.eos?)\n # end of formatting\n if s.scan(/(\\e|\\[)\\[0m/)\n t << f.pop\n t.compact!\n if f.empty?\n r << [a, t]\n t = []\n a = nil\n end\n\n # basic formatter\n elsif s.scan(/\\e\\[(3[0-7]|90|1)m/)\n # FIXME need to register formatting for 'a'\n # up to this point (and reset 'a') (and below)\n f << ANSI_COLORS[s[1].to_i]\n\n # sgr\n # https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_(Select_Graphic_Rendition)_parameters\n elsif s.scan(/\\e\\[(([0-9]+;?)+)m/)\n sgr = s[1].split(\";\").collect { |s| s.to_i }\n f << if (30..37).include?(sgr[0])\n ANSI_COLORS[sgr[1]]\n\n elsif sgr[0] == 38\n if sgr[1] == 5\n if sgr[2] < 8\n ANSI_COLORS[sgr[2]]\n\n elsif sgr[2] < 16\n ANSI_COLORS[sgr[2]]\n\n elsif sgr[2] < 232\n # TODO verify:\n # https://stackoverflow.com/questions/12338015/converting-8-bit-color-into-rgb-value\n re = (sgr[2] >> 5) * 32\n gr = ((sgr[2] & 28) >> 2) * 32\n bl = (sgr[2] & 3) * 64\n [re, gr, bl]\n\n else # if srg[2] < 256\n # TODO\n end\n\n else # if sgr[1] == 2\n # TODO\n end\n\n # TODO other sgr commands\n end\n\n else\n a = \"\" if a.nil?\n a += s.scan(/./m)\n\n end\n end\n\n\n # handle remaining / lingering data\n r << [a, (t + f).compact] unless f.empty?\n\n r\n end",
"def colorize txt, fg, bg, flags\n txt\n end",
"def colorize_text(text)\n return text unless ActiveRecordQueryTrace.colorize\n # Try to convert the choosen color from string to integer or try\n # to use the colorize as the color code\n colors = {\n true => \"38\", \"blue\" => \"34\", \"light red\" => \"1;31\",\n \"black\" => \"30\", \"purple\" => \"35\", \"light green\" => \"1;32\",\n \"red\" => \"31\", \"cyan\" => \"36\", \"yellow\" => \"1;33\",\n \"green\" => \"32\", \"gray\" => \"37\", \"light blue\" => \"1;34\",\n \"brown\" => \"33\", \"dark gray\" => \"1;30\", \"light purple\" => \"1;35\",\n \"white\" => \"1;37\", \"light cyan\" => \"1;36\"\n }\n color_code = colors[ActiveRecordQueryTrace.colorize] ||\n ActiveRecordQueryTrace.colorize.to_s\n unless /\\d+(;\\d+){0,1}/.match(color_code)\n raise \"Invalid color. Use one of #{ colors.keys } or a valid color code\"\n end\n \"\\e[#{ color_code }m#{ text }\\e[0m\"\n end",
"def colorize(text, code)\n if output.respond_to?(:tty?) && output.tty?\n \"\\033[#{code}m#{text}\\033[0m\"\n else\n text\n end\n end",
"def add_normal_color(str, color)\n str = reset_prev_formatting str, :color\n \"\\e[#{TC_CONFIG[:colors][color].to_s}m#{str}\"\n end",
"def red(input)\n puts \"\\e[31m#{input}\\e[0m\"\nend",
"def red(input)\n puts \"\\e[31m#{input}\\e[0m\"\nend",
"def ansi?(string)\n !!(string =~ ANSI_MATCHER)\n end",
"def blue(text)\n colorize(text, 34)\nend",
"def color(text, color_code)\n ::Guard::UI.send(:color_enabled?) ? \"\\e[0#{ color_code }m#{ text }\\e[0m\" : text\n end",
"def cyan(string)\n \"\\033[0;36m#{string}\\033[0m\"\nend",
"def safe_colorize(text, color=nil)\n CLIColorize.safe_colorize(text, color)\n end",
"def red_(string)\n print $color.wrap(\"#{string}\").red\nend",
"def color(text, *color_options)\n color_code = ''\n color_options.each do |color_option|\n color_option = color_option.to_s\n if color_option != ''\n if !(color_option =~ /\\d+/)\n color_option = const_get(\"ANSI_ESCAPE_#{ color_option.upcase }\")\n end\n color_code += ';' + color_option\n end\n end\n color_enabled? ? \"\\e[0#{ color_code }m#{ text }\\e[0m\" : text\n end",
"def colorize!(color_code) \"#{COLORS[color_code]}#{self.to_s}\\e[0m\" ; end",
"def color(string, *colors)\n return string unless use_color?\n HighLine.Style(*colors).color(string)\n end",
"def red_style(string)\n pastel = Pastel.new\n red_string = pastel.red(\"#{string}\")\n return red_string\nend",
"def highlight text\n color_code = 7\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def highlight(str = '')\n return '' if str.blank?\n\n str = str.split('=')\n str.count > 1 ? \"\\e[32m#{str[0]}=\\e[33m#{str[1]}\\e[0m\" : \"\\e[32m#{str[0]}\\e[0m\"\nend",
"def green(text)\n colorize text, \"\\033[1;32m\"\n end"
] | [
"0.75238115",
"0.7335181",
"0.73001397",
"0.72790354",
"0.7220566",
"0.71449566",
"0.71449566",
"0.7144078",
"0.7144078",
"0.71135604",
"0.7083712",
"0.7083712",
"0.7083712",
"0.7083712",
"0.7083712",
"0.70121366",
"0.7001054",
"0.6998708",
"0.69800097",
"0.6932377",
"0.6910562",
"0.69069743",
"0.69069743",
"0.6876237",
"0.68673736",
"0.68673736",
"0.68673736",
"0.68673736",
"0.68416625",
"0.683397",
"0.6819466",
"0.68081826",
"0.68081826",
"0.68081826",
"0.68081826",
"0.68081826",
"0.67950386",
"0.6781805",
"0.67784923",
"0.67784923",
"0.6775068",
"0.6775068",
"0.6775068",
"0.6722441",
"0.67215735",
"0.67137796",
"0.66954476",
"0.66912335",
"0.66789716",
"0.66780436",
"0.66327083",
"0.6626632",
"0.6625525",
"0.6608997",
"0.6600219",
"0.6594374",
"0.65760005",
"0.6558493",
"0.65552324",
"0.6554585",
"0.654361",
"0.65430856",
"0.6535586",
"0.6524815",
"0.65217876",
"0.65204865",
"0.65048563",
"0.6475455",
"0.64705014",
"0.6470129",
"0.64651823",
"0.6446745",
"0.6414154",
"0.6401206",
"0.6382194",
"0.6378981",
"0.6370582",
"0.63481146",
"0.6326237",
"0.6322433",
"0.6321171",
"0.6319839",
"0.62960654",
"0.62947196",
"0.62826276",
"0.6280568",
"0.6280568",
"0.6274445",
"0.6274414",
"0.62677157",
"0.6226174",
"0.6220909",
"0.62194055",
"0.62136203",
"0.6201926",
"0.61693716",
"0.6160306",
"0.61541516",
"0.6142667",
"0.6140671"
] | 0.65859956 | 56 |
Strip ANSI color codes from a string. | def strip(string)
string.to_s.gsub(/(\[)?\033(\[)?[;?\d]*[\dA-Za-z](\])?/, '')
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strip_ansi(str)\n str.gsub(ANSI, '')\n end",
"def strip_ansi(str)\n str.gsub(\n ANSI_MATCHR, \"\"\n )\n end",
"def strip_ansi(str)\n str.gsub(/\\e\\[(?:\\d+)(?:;\\d+)?m/, \"\")\n end",
"def strip_color_codes(text); end",
"def unansi(string)\n string.gsub(PATTERN, '')\n end",
"def strip_color\n return self.gsub(/\\e\\[0;[39]\\d;49m/, '').gsub(/\\e\\[0m/, '')\n end",
"def uncolorize(string)\n string.gsub(/\\e\\[(\\d+)(;(\\d+))*m/, '')\n end",
"def strip_color_codes(text)\n text.gsub(/\\e\\[(\\d+)(;\\d+)*m/, '')\n end",
"def strip_color(text)\n text.to_s.gsub(/(\\001)?\\e\\[.*?(\\d)+m(\\002)?/, '')\n end",
"def strip_color(text)\n text.to_s.gsub(/(\\001)?\\e\\[.*?(\\d)+m(\\002)?/ , '')\n end",
"def uncolor(string)\n Style.uncolor(string)\n end",
"def sanitize(string)\n string.gsub(ANSI_MATCHER, \"\")\n end",
"def strip_color\n gsub(COLOR_REGEXP, '')\n end",
"def remove_colors\n gsub(/\\e\\[\\d+m/, '')\n end",
"def clean(message)\n message = message.to_s.strip\n message.gsub!(/\\e\\[[0-9;]*m/, '') # remove useless ansi color codes\n message\n end",
"def decolorize_maybe(str)\n if _pry_.config.color\n str\n else\n Helpers::Text.strip_color str\n end\n end",
"def uncolor(string=nil, &block)\n if block_given?\n block.call.to_str.gsub(ANSI_REGEX, '')\n elsif string.respond_to?(:to_str)\n string.to_str.gsub(ANSI_REGEX, '')\n elsif respond_to?(:to_str)\n to_str.gsub(ANSI_REGEX, '')\n else\n ''\n end\n end",
"def strip_colors\n self.class.new self.raw.gsub(/\\x03(?:[019]?[0-9](?:,[019]?[0-9])?)?/, \"\")\n end",
"def filter_out(str)\n return \"\" if str.nil? || str.empty?\n if @pstack.color_on\n str.gsub!(/\\[COLOR\\s+(\\w+)\\s+ON\\s+(\\w+)\\]/mi) do |m|\n if ColorTable[$1] && ColorTable[$2]\n ColorTable[$1][2]+ColorTable[$2][3]\n else\n ''\n end\n end\n str.gsub!(/\\[COLOR\\s+(\\w+)\\]/mi) do |m|\n if ColorTable[$1]\n ColorTable[$1][2]\n else\n ''\n end\n end\n str.gsub!(/\\[\\/COLOR\\]/mi) do |m|\n ANSICODE['reset']\n end\n str.gsub!(/\\[[BI]\\]/mi) do |m|\n ANSICODE['bold']\n end\n str.gsub!(/\\[U\\]/mi) do |m|\n ANSICODE['underline']\n end\n str.gsub!(/\\[\\/[BUI]\\]/mi) do |m|\n ANSICODE['reset']\n end\n else\n str.gsub!(/\\[COLOR\\s+(\\w+)\\s+ON\\s+(\\w+)\\]/mi,'')\n str.gsub!(/\\[COLOR\\s+(\\w+)\\]|\\[\\/COLOR\\]/mi, '')\n str.gsub!(/\\[SIZE .*?\\]|\\[\\/SIZE\\]/mi, '')\n str.gsub!(/\\[FONT .*?\\]|\\[\\/FONT\\]/mi, '')\n str.gsub!(/\\[[BUI]\\]|\\[\\/[BUI]\\]/mi, '')\n end\n str\n end",
"def scolora(str)\n str.to_s.\n gsub(/\\e\\[1;33m/,''). # colori 16\n gsub(/\\e\\[0m/,''). # colori 64k\n gsub(/\\e\\[38;\\d+;\\d+m/,'') # end color\nend",
"def bbcode_to_ansi(string, usecolors = T.unsafe(nil)); end",
"def decolorize\r\n self.gsub(/\\[0;\\d\\d;\\d\\dm([^\\[]*)\\[0m/) { $1 }\r\n end",
"def decolorize\r\n self.gsub(/\\[0;\\d\\d;\\d\\dm([^\\[]*)\\[0m/) { $1 }\r\n end",
"def stripped(string)\n string.chars.gsub(/[^\\x20-\\x7E]/, '')\n end",
"def decolorize!\n gsub!(/\\e\\[\\d+[;\\d]*m/, '')\n self\n end",
"def filter_string(string)\n string.gsub(/[^0-9a-fA-F]/, '').upcase\n end",
"def unescape(string)\n string.gsub(/#[[:xdigit:]]{2}/) do |match|\n match[1..-1].to_i(16).chr\n end\n end",
"def unescape(string)\n string.gsub(/#[[:xdigit:]]{2}/) do |match|\n match[1..-1].to_i(16).chr\n end\n end",
"def decolorize!\n gsub! /\\e\\[\\d+[;\\d]*m/, ''\n self\n end",
"def strip_control_characters_and_excesses(string)\n last = string.split(\"\\033[2;0f\").last#.gsub(/(\\e\\[\\d+\\w)|(\\e\\[\\w)/,\"\")\n if last.empty?\n \"\"\n else\n last.gsub(/(\\e\\[\\d+\\w)|(\\e\\[\\w)/,\"\").gsub(\" +\",\"\")\n end\nend",
"def colorize(string, color_code)\n if !defined?(Win32::Console) && !!(RUBY_PLATFORM =~ /win32/ || RUBY_PLATFORM =~ /mingw32/)\n # looks like this person doesn't have Win32::Console and is on windows\n # just return the uncolorized string\n return string\n end\n \"#{CODES[color_code] || color_code}#{string}#{CODES[:reset]}\"\n end",
"def raw\n parsed_colors[:string].uncolor\n end",
"def bash_color_codes(string)\n string.gsub(\"\\e[0m\", '</span>').\n gsub(\"\\e[31m\", '<span class=\"color31\">').\n gsub(\"\\e[32m\", '<span class=\"color32\">').\n gsub(\"\\e[33m\", '<span class=\"color33\">').\n gsub(\"\\e[34m\", '<span class=\"color34\">').\n gsub(\"\\e[35m\", '<span class=\"color35\">').\n gsub(\"\\e[36m\", '<span class=\"color36\">').\n gsub(\"\\e[37m\", '<span class=\"color37\">')\n end",
"def remove_colors(data)\n data.gsub(/\\x1B\\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]/, \"\")\n end",
"def unansi(string = T.unsafe(nil)); end",
"def unansi(string = T.unsafe(nil)); end",
"def html_to_ansi(string); end",
"def strip_non_ascii_chars(s)\n # truncate the string at the first null character\n s = s[0, s.index(\"\\x00\")] if s.index(\"\\x00\")\n \n s.gsub(/[^\\x20-\\x7E]/,\"\")\n end",
"def strip_unprintable_characters(s)\n s.tr(8204.chr, \"\")\nend",
"def uncolored\n map {|n| n.to_s.uncolored}\n end",
"def uncolor(string = T.unsafe(nil)); end",
"def uncolor(string = T.unsafe(nil)); end",
"def strip_hashes text\n return text if text =~ /^(?>\\s*)[^\\#]/\n text.gsub(/^\\s*(#+)/) { $1.tr '#',' ' }\n end",
"def only_ansi?(string)\n !!(string =~ /\\A(#{ANSI_MATCHER})+\\z/)\n end",
"def extra_clean_str(str)\n str = str.downcase.gsub @extra_ua_filter, ''\n str = str.gsub(/[^\\x20-\\x7F]/, '')\n str.strip\n end",
"def parse_ansi(str)\n require 'strscan'\n\n r = []\n f = []\n t = []\n\n a = nil\n s = StringScanner.new(str)\n\n while(!s.eos?)\n # end of formatting\n if s.scan(/(\\e|\\[)\\[0m/)\n t << f.pop\n t.compact!\n if f.empty?\n r << [a, t]\n t = []\n a = nil\n end\n\n # basic formatter\n elsif s.scan(/\\e\\[(3[0-7]|90|1)m/)\n # FIXME need to register formatting for 'a'\n # up to this point (and reset 'a') (and below)\n f << ANSI_COLORS[s[1].to_i]\n\n # sgr\n # https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_(Select_Graphic_Rendition)_parameters\n elsif s.scan(/\\e\\[(([0-9]+;?)+)m/)\n sgr = s[1].split(\";\").collect { |s| s.to_i }\n f << if (30..37).include?(sgr[0])\n ANSI_COLORS[sgr[1]]\n\n elsif sgr[0] == 38\n if sgr[1] == 5\n if sgr[2] < 8\n ANSI_COLORS[sgr[2]]\n\n elsif sgr[2] < 16\n ANSI_COLORS[sgr[2]]\n\n elsif sgr[2] < 232\n # TODO verify:\n # https://stackoverflow.com/questions/12338015/converting-8-bit-color-into-rgb-value\n re = (sgr[2] >> 5) * 32\n gr = ((sgr[2] & 28) >> 2) * 32\n bl = (sgr[2] & 3) * 64\n [re, gr, bl]\n\n else # if srg[2] < 256\n # TODO\n end\n\n else # if sgr[1] == 2\n # TODO\n end\n\n # TODO other sgr commands\n end\n\n else\n a = \"\" if a.nil?\n a += s.scan(/./m)\n\n end\n end\n\n\n # handle remaining / lingering data\n r << [a, (t + f).compact] unless f.empty?\n\n r\n end",
"def red(string)\n colorize(string, 31)\n end",
"def convert_ansi_to_html(data)\n COLOR_MAPPING.each do |k, v|\n data.gsub!(/\\e\\[#{k}m/, \"<span style=\\\"color:#{v}\\\">\")\n end\n return data.gsub(/\\e\\[0m/, \"</span>\")\n end",
"def clean_str(str)\n str = str.downcase.gsub @device_ua_filter, ''\n str = str.gsub(/[^\\x20-\\x7F]/, '')\n str.strip\n end",
"def strip str\n str.gsub /(\\x0F|\\x1D|\\02|\\03([0-9]{1,2}(,[0-9]{1,2})?)?)/, \"\"\n end",
"def ansi_to_html(string); end",
"def reset_prev_formatting(str, type)\n case type\n when :color\n str = str.gsub(\"\\e[1m\", '').gsub(/\\e\\[[3][0-7]m/, '')\n when :bg_color\n str = str.gsub(/\\e\\[[4][0-7]m/, '')\n when :underline\n str = str.gsub(\"\\e[4m\", '')\n when :strikethrough\n str = str.gsub(\"\\e[9m\", '')\n end\n\n # Remove ANSI termination characters from `str`.\n str = str.gsub(\"\\e[0m\", '')\n\n # Hack! Add ANSI termination character at the end of `str` if `str`\n # contains any fancy stuff added by term colorizer methods before.\n if str.scan(/\\e\\[[1-9]/).any?\n str = str + \"\\e[0m\" unless str.end_with? \"\\e[0m\"\n end\n\n return str\n end",
"def clean_string(string)\n chars = string.chars\n stack = []\n \n chars.each do |char|\n stack << char if char != '#'\n stack.pop if char == '#'\n end\n \n stack.join\n \nend",
"def colorize(str, color_code = 36)\n \"\\e[#{color_code}m#{str}\\e[0m\"\n end",
"def wash! string\n string.gsub! /\\e\\[[0-9]+m/, \"\"\n end",
"def remove_black_color(env)\n node = env[:node]\n return unless node.element?\n return unless node.attr('style').present?\n node['style'] = node['style'].gsub(/(?<!background-)(color:#000000;?)/, '')\n end",
"def strip(str)\n str.gsub(UNICODE_LEADERS_PAT, '').gsub(UNICODE_TRAILERS_PAT, '')\n end",
"def strip_non_ascii(string)\n strip_diacritics(string).gsub(/[^a-z0-9]+/i, ' ')\n end",
"def regex_strip(string)\n return string.gsub(/[^\\p{L}\\p{N}]/u, \" \")\n end",
"def strip_hashes text\n return text if text =~ /^(?>\\s*)[^\\#]/\n\n empty = ''\n empty = RDoc::Encoding.change_encoding empty, text.encoding\n\n text.gsub(/^\\s*(#+)/) { $1.tr '#', ' ' }.gsub(/^\\s+$/, empty)\n end",
"def sanitize_css(style_string)\n ::Loofah::HTML5::Scrub.scrub_css(style_string)\n end",
"def colorize(string, code)\n \"\\e[#{code}m#{string}\\e[0m\"\n end",
"def ansi(string, *codes)\n return string unless $ansi\n s = \"\"\n codes.each do |code|\n s << \"\\e[#{TABLE[code]}m\"\n end\n s << string\n s << CLEAR\n end",
"def clean(message)\n message = message.to_s.dup\n message.strip!\n message.gsub!(/%/, '%%') # syslog(3) freaks on % (printf)\n message.gsub!(/\\e\\[[^m]*m/, '') # remove useless ansi color codes\n return message\n end",
"def strip_style_tag (s)\ns.gsub(/<style.*<\\/style>/i, '');\nend",
"def parse_colors(s)\n\n line = \"\"\n\n s.each_char do |c|\n line.concat(@colors[c]) if @colors.has_key?(c)\n line.concat(\" \")\n end\n\n line.concat(\"\\033[0m\")\n end",
"def clean(message)\n message = message.to_s.dup\n message.strip!\n message.gsub!(/%/, '%%') # syslog(3) freaks on % (printf)\n message.gsub!(/\\e\\[[^m]*m/, '') # remove useless ansi color codes\n return message\n end",
"def uncolored(text = nil)\n if block_given?\n uncolorize(yield)\n elsif text\n uncolorize(text)\n elsif respond_to?(:to_str)\n uncolorize(to_str)\n else\n ''\n end\n end",
"def strip(s)\n Sanitize.clean(s)\n end",
"def strip_except_escapes(string)\n rstrip_except_escapes(string.lstrip)\n end",
"def ansi?(string)\n !!(string =~ ANSI_MATCHER)\n end",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def filter_out(str)\n return '' if str.nil? || str.empty?\n if !@pstack.binary_on\n str.gsub!(/\\n/, \"\\r\\n\")\n end\n str\n end",
"def rstrip_except_escapes(string)\n string.sub(/(?<!\\\\)\\s+$/, '')\n end",
"def colorize(text, color_code); \"#{color_code}#{text}\\033[0m\"; end",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def strong_strip\n reverse.gsub(/^\\p{Zs}+|^\\p{Cf}+/, '').reverse.gsub(/^\\p{Zs}+|^\\p{Cf}+/, '')\n end",
"def escape_text\n color.strip text.dup\n end",
"def colorize(str, foreground)\n if @color\n fore = FOREGROUND[foreground] or raise ArgumentError, \"Unknown foreground color #{foreground.inspect}\"\n \"#{fore}#{str}#{RESET}\"\n else\n str.to_s\n end\n end",
"def safe_colorize(text, color=nil)\n CLIColorize.safe_colorize(text, color)\n end",
"def decorate(string, *colors)\n return string if string.empty? || !enabled\n validate(*colors)\n ansi_colors = colors.map { |color| lookup(color) }\n ansi_string = \"#{ansi_colors.join}#{string}#{ANSI::CLEAR}\"\n if ansi_string =~ /(#{Regexp.quote(ANSI::CLEAR)}){2,}/\n ansi_string.gsub!(/(#{Regexp.quote(ANSI::CLEAR)}){2,}/, '')\n end\n matches = ansi_string.scan(/#{Regexp.quote(ANSI::CLEAR)}/)\n if matches.length >= 2\n ansi_string.sub!(/#{Regexp.quote(ANSI::CLEAR)}/, ansi_colors.join)\n end\n ansi_string\n end",
"def white_out(str)\n str.delete(\" \\n\\t\")\nend",
"def clean_string(string)\n string = string.gsub(/\\r|\\n/,'').sub(/^ */,'').sub(/\\s*$/,'').gsub(/ +/,' ')\n coder = HTMLEntities.new()\n string = coder.decode(string) # Remove html entities\n return string\n end",
"def trim(string)\n string.sub(/^[ ]+/, '').sub(/[ ]+$/, '')\n end",
"def colorize(text, color_code)\n \"#{color_code}#{text}\\033[0m\"\n end",
"def strip_line_numbers(str_with_ruby_code)\n str_with_ruby_code.gsub(/ *\\d+: ? ?/, \"\")\n end",
"def strip(str); end",
"def no_leading_spaces str\n return str.force_encoding('ASCII-8BIT').gsub(/\\302\\240/,'').strip # What a hack.\n end",
"def fix_color(color)\n # check for 3 or 6 character hexadecimal value\n if (color and color.match(/^([0-9]|[a-f]|[A-F]){3}(([0-9]|[a-f]|[A-F]){3})?$/))\n color = '#' + color\n end\n\n return color\n end",
"def colorize(text)\n Albino.new(text, self).colorize(:O => 'stripnl=false')\n end",
"def colorize(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\nend",
"def colorize!(color_code) \"#{COLORS[color_code]}#{self.to_s}\\e[0m\" ; end",
"def white\n colorize(37)\n end",
"def strip_control_and_extended_characters\n self.chars.inject(\"\") do |str, char|\n if char.ascii_only? and char.ord.between?(32,126)\n str << char\n end\n str\n end\n end",
"def cleanup(string)\n string.gsub!(/^--- $/, \"\")\n end"
] | [
"0.8508064",
"0.830651",
"0.7984447",
"0.7951355",
"0.7910524",
"0.7729496",
"0.76483184",
"0.7619692",
"0.7607166",
"0.7519985",
"0.7481777",
"0.746306",
"0.7399694",
"0.7231407",
"0.7126164",
"0.71191233",
"0.71106493",
"0.68943185",
"0.6791379",
"0.65753347",
"0.6528369",
"0.6524128",
"0.6524128",
"0.6501694",
"0.64826876",
"0.63639",
"0.6353926",
"0.6353926",
"0.635253",
"0.6345247",
"0.6331354",
"0.6321461",
"0.63058794",
"0.629626",
"0.6284018",
"0.6284018",
"0.6274851",
"0.62159693",
"0.61328083",
"0.61137575",
"0.6103266",
"0.6103266",
"0.6094901",
"0.60916483",
"0.60732466",
"0.6059683",
"0.60310906",
"0.5975363",
"0.5970995",
"0.59709126",
"0.59289056",
"0.592769",
"0.5901922",
"0.5901894",
"0.5889758",
"0.58710957",
"0.5854472",
"0.58401185",
"0.5807579",
"0.5796989",
"0.5783822",
"0.5773346",
"0.5764173",
"0.5746766",
"0.5733841",
"0.5713526",
"0.56930655",
"0.5677106",
"0.56442124",
"0.56368214",
"0.56275475",
"0.56265044",
"0.56265044",
"0.55890816",
"0.5588829",
"0.55722207",
"0.55649346",
"0.55649346",
"0.55649346",
"0.55649346",
"0.55649346",
"0.5547949",
"0.5543883",
"0.55305535",
"0.5523054",
"0.55073917",
"0.5496281",
"0.5489505",
"0.54862005",
"0.54797095",
"0.54777753",
"0.54755485",
"0.54490167",
"0.54460627",
"0.53997564",
"0.5397616",
"0.5397525",
"0.53965676",
"0.53885275",
"0.5371548"
] | 0.7600902 | 9 |
Return raw color code without embedding it into a string. | def code(*colors)
validate(*colors)
colors.map { |color| lookup(color) }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def raw\n parsed_colors[:string].uncolor\n end",
"def strip_color_codes(text); end",
"def to_c\n\t\t\tif color == \"white\"\n\t\t\t\t\"\\u26aa\"\n\t\t\telsif color == \"red\"\n\t\t\t\t\"\\u26d4\"\n\t\t\telsif color == \"black\"\n\t\t\t\t\"\\u26ab\"\n\t\t\tend\n\t\tend",
"def strip_colors\n self.class.new self.raw.gsub(/\\x03(?:[019]?[0-9](?:,[019]?[0-9])?)?/, \"\")\n end",
"def strip_color\n return self.gsub(/\\e\\[0;[39]\\d;49m/, '').gsub(/\\e\\[0m/, '')\n end",
"def get_color_code\n\t\t{ r: @term_hex[0], g: @term_hex[1], b: @term_hex[2], alpha: @term_hex[-1] }\n\tend",
"def code\n index = 16 +\n RGB.to_ansi_domain(@red) * 36 +\n RGB.to_ansi_domain(@green) * 6 +\n RGB.to_ansi_domain(@blue)\n\n \"#{@ground_code};5;#{index}\"\n end",
"def strip_color\n gsub(COLOR_REGEXP, '')\n end",
"def rgb_code(red, green, blue, background = T.unsafe(nil)); end",
"def rgb_code(red, green, blue, background = T.unsafe(nil)); end",
"def names_to_code(colorname)\n \"\"\n end",
"def actual_color\n ColorCode.where(numeric_code: default_code.split(\"-\")[-2])[0]\n end",
"def color(code, str, partial = T.unsafe(nil)); end",
"def uncolor(string = T.unsafe(nil)); end",
"def uncolor(string = T.unsafe(nil)); end",
"def colorize!(color_code) \"#{COLORS[color_code]}#{self.to_s}\\e[0m\" ; end",
"def render_ansi\n qrcode.as_ansi if valid?\n end",
"def to_s\n \"#{@hex_color}\"\n end",
"def to_s\n @normalized_rcdata.gsub(/&(?:#([0-9]+));/o) {|s|\n u = $1.to_i\n if 0 <= u && u <= 0x7f\n [u].pack(\"C\")\n else\n '?'\n end\n }\n end",
"def strip_color_codes(text)\n text.gsub(/\\e\\[(\\d+)(;\\d+)*m/, '')\n end",
"def to_s\n @color\n end",
"def hex_code(string, background = T.unsafe(nil)); end",
"def hex_code(string, background = T.unsafe(nil)); end",
"def color_code(val)\n if val.is_a? String\n code = val\n val = code.hex\n val = -1 if val == 0 && code != '00'\n elsif val.is_a? Fixnum\n code = val\n val = val.to_i\n code = '%02x' % val\n else\n return nil\n end\n\n if (0..255).cover? val\n code\n else\n nil\n end\n end",
"def to_truecolor_bytes(color)\n [r(color), g(color), b(color)]\n end",
"def to_truecolor_bytes(color)\n [r(color), g(color), b(color)]\n end",
"def escape_text\n color.strip text.dup\n end",
"def full_color_code(color)\n matched = color.match(REGEX_COLOR_CODE)\n raise ArgumentError, 'Ex: #222' if matched.nil?\n case matched[:color].length\n when 3; matched[:color] * 2\n when 6; matched[:color]\n else\n raise ArgumentError, \"Wrong color code, ex: #333 | #333333\"\n end\n end",
"def remove_colors\n gsub(/\\e\\[\\d+m/, '')\n end",
"def bg_red; use_code(41) end",
"def red\n colorize(31)\n end",
"def to_s\n case @color\n when :black then '♚'\n when :white then '♔'\n end\n end",
"def strip_color(text)\n text.to_s.gsub(/(\\001)?\\e\\[.*?(\\d)+m(\\002)?/, '')\n end",
"def normal_color\n #return Color.new(255,255,255)\n end",
"def green\n colorize(32)\n end",
"def uncolored\n map {|n| n.to_s.uncolored}\n end",
"def to_s\n if self.color == :white\n \" ♜ \"\n else\n \" ♖ \"\n end\n end",
"def bash_color_codes(string)\n string.gsub(\"\\e[0m\", '</span>').\n gsub(\"\\e[31m\", '<span class=\"color31\">').\n gsub(\"\\e[32m\", '<span class=\"color32\">').\n gsub(\"\\e[33m\", '<span class=\"color33\">').\n gsub(\"\\e[34m\", '<span class=\"color34\">').\n gsub(\"\\e[35m\", '<span class=\"color35\">').\n gsub(\"\\e[36m\", '<span class=\"color36\">').\n gsub(\"\\e[37m\", '<span class=\"color37\">')\n end",
"def bbcode_to_ansi(string, usecolors = T.unsafe(nil)); end",
"def colorize_code(code, complete: true, ignore_error: false, colorable: colorable?)\n return code unless colorable\n\n symbol_state = SymbolState.new\n colored = +''\n length = 0\n end_seen = false\n\n scan(code, allow_last_error: !complete) do |token, str, expr|\n # IRB::ColorPrinter skips colorizing fragments with any invalid token\n if ignore_error && ERROR_TOKENS.include?(token)\n return Reline::Unicode.escape_for_print(code)\n end\n\n in_symbol = symbol_state.scan_token(token)\n str.each_line do |line|\n line = Reline::Unicode.escape_for_print(line)\n if seq = dispatch_seq(token, expr, line, in_symbol: in_symbol)\n colored << seq.map { |s| \"\\e[#{s}m\" }.join('')\n colored << line.sub(/\\Z/, clear(colorable: colorable))\n else\n colored << line\n end\n end\n length += str.bytesize\n end_seen = true if token == :on___end__\n end\n\n # give up colorizing incomplete Ripper tokens\n unless end_seen or length == code.bytesize\n return Reline::Unicode.escape_for_print(code)\n end\n\n colored\n end",
"def strip_color(text)\n text.to_s.gsub(/(\\001)?\\e\\[.*?(\\d)+m(\\002)?/ , '')\n end",
"def encode_color color\n\t\tcase color.downcase\n\t\twhen 'red', 'r' then Mastermind::COLORS[:red]\n\t\twhen 'green', 'g' then Mastermind::COLORS[:green]\n\t\twhen 'yellow', 'y' then Mastermind::COLORS[:yellow]\n\t\twhen 'blue', 'b' then Mastermind::COLORS[:blue]\n\t\twhen 'magenta', 'm' then Mastermind::COLORS[:magenta]\n\t\twhen 'cyan', 'c' then Mastermind::COLORS[:cyan]\n\t\telse nil\n\t\tend\n\tend",
"def rgb\n if Configuration.colour_mode == 16777216\n sprintf(\"\\e[38;2;%s;%s;%sm\", *css_to_rgb)\n\n else\n numbered\n\n end\n end",
"def raw\n repr.raw\n end",
"def invalid_color_msg(_clr)\n ''\n end",
"def invalid_color_msg(_clr)\n ''\n end",
"def hex color\n Renderer.hex(color)\n end",
"def uncolor(string=nil, &block)\n if block_given?\n block.call.to_str.gsub(ANSI_REGEX, '')\n elsif string.respond_to?(:to_str)\n string.to_str.gsub(ANSI_REGEX, '')\n elsif respond_to?(:to_str)\n to_str.gsub(ANSI_REGEX, '')\n else\n ''\n end\n end",
"def hex_color(args = T.unsafe(nil)); end",
"def normal_color\n return Color.new(255, 255, 255)\n end",
"def to_string\r\n \"#{colour}\"\r\n end",
"def convert_code\n @code.map{ |num| translate_num_to_color(num) }\n end",
"def color_code(color)\n \"\\e[#{(color.is_a? Symbol) ? (COLOR_CODES[color] || COLOR_CODES[:default]) : color.to_i}m\"\n end",
"def normal_color; return Color.normal_color; end",
"def r\n\t\t\tColor.r(@color)\n\t\tend",
"def scolora(str)\n str.to_s.\n gsub(/\\e\\[1;33m/,''). # colori 16\n gsub(/\\e\\[0m/,''). # colori 64k\n gsub(/\\e\\[38;\\d+;\\d+m/,'') # end color\nend",
"def colorize(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\nend",
"def to_s\n s = 'RGB'\n s += 'A' if alpha != ALPHA_OPAQUE\n s += \"[#{@index}]\" if @index\n s += '#' + [red, green, blue].map { |e| '%02X' % e }\n .join('')\n s += '%02X' % alpha if alpha != ALPHA_OPAQUE\n s\n end",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def colorize(color_code)\n \"\\e[#{color_code};40m#{self}\\e[0m\"\n end",
"def colorize(color_code)\n \"\\e[#{color_code}m#{self}\\e[0m\"\n end",
"def colorize(color_code)\n \"\\e[#{color_code}m#{self}\\e[0m\"\n end",
"def decolorize\r\n self.gsub(/\\[0;\\d\\d;\\d\\dm([^\\[]*)\\[0m/) { $1 }\r\n end",
"def decolorize\r\n self.gsub(/\\[0;\\d\\d;\\d\\dm([^\\[]*)\\[0m/) { $1 }\r\n end",
"def colorize(text, color_code); \"#{color_code}#{text}\\033[0m\"; end",
"def inspect\n \"#<#{self.class.name}: {\" + map { |b|\n case b\n when (0x07..0x0d), (0x20..0x7e)\n b.chr.dump\n when 0x00\n # sly hack to make char-sets more friendly\n # to us C programmers\n '\"\\0\"'\n else\n \"0x%02x\" % b\n end\n }.join(', ') + \"}>\"\n end",
"def white\n colorize(37)\n end",
"def rgb(r, g, b)\n [r, g, b].map do |c|\n if c <= 0 \n \"00\"\n elsif c > 255\n \"FF\"\n else\n c.to_s(16).upcase \n end\n end.join('')\nend",
"def decolorize_maybe(str)\n if _pry_.config.color\n str\n else\n Helpers::Text.strip_color str\n end\n end",
"def colour_for(char)\n return ''.freeze if char.colour == @colour\n\n @colour = char.colour\n @colour.to_s\n end",
"def uncolorize\n @uncolorized || self\n end",
"def uncolorize(string)\n string.gsub(/\\e\\[(\\d+)(;(\\d+))*m/, '')\n end",
"def to_s\n 'RicColor: ' + self.send(@color)\n end",
"def r; self.color.r end",
"def colorize(string, color_code)\n if !defined?(Win32::Console) && !!(RUBY_PLATFORM =~ /win32/ || RUBY_PLATFORM =~ /mingw32/)\n # looks like this person doesn't have Win32::Console and is on windows\n # just return the uncolorized string\n return string\n end\n \"#{CODES[color_code] || color_code}#{string}#{CODES[:reset]}\"\n end",
"def decolorize!\n gsub!(/\\e\\[\\d+[;\\d]*m/, '')\n self\n end",
"def disable_colorization(value = T.unsafe(nil)); end",
"def color(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\n end",
"def colorized?; end",
"def rgb(r, g, b)\n \"#{to_hex r}#{to_hex g}#{to_hex b}\"\nend",
"def to_s\n \"#<Wx::Colour: (#{red}, #{green}, #{blue} *#{alpha})>\"\n end",
"def colour_for(char)\n return '' if char.colour == @colour\n\n @colour = char.colour\n @colour.to_s\n end",
"def colorize(str, color_code = 36)\n \"\\e[#{color_code}m#{str}\\e[0m\"\n end",
"def inspect\n alpha? ? rgba_str : hex_str\n end",
"def inspect\n alpha? ? rgba_str : hex_str\n end",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def bg_dark_grey; use_code(100) end",
"def to_color color\n \"\\x1b[#{COLOR_CODE[color]}m#{to_s}\\x1b[m\"\n end",
"def safe_colorize(text, color=nil)\n CLIColorize.safe_colorize(text, color)\n end",
"def get_color(key)\n if key.is_a? String\n color = key\n elsif Wirb::COLORS.key?(key)\n color = Wirb::COLORS[key]\n end\n\n color ? \"\\033[#{ color }m\" : ''\n end",
"def raw_code\n @raw_code ||= (File.read path).to_s.force_encoding(Encoding::UTF_8)\n end",
"def colorize(text, color_code); \"\\e[#{color_code}m#{text}\\e[0m\"; end",
"def colour(name, text)\n if Pry.color\n str = Pry::Helpers::Text.send(name, text)\n unless str.start_with?(\"\\001\")\n str = \"\\001#{Pry::Helpers::Text.send name, '{text}'}\\002\".sub '{text}', \"\\002#{text}\\001\"\n end\n str\n else\n text\n end\nend",
"def disable_color\n return translate_color(7)\n end",
"def inspect\n pretty.uncolor\n end",
"def convert_ansi_to_html(data)\n COLOR_MAPPING.each do |k, v|\n data.gsub!(/\\e\\[#{k}m/, \"<span style=\\\"color:#{v}\\\">\")\n end\n return data.gsub(/\\e\\[0m/, \"</span>\")\n end"
] | [
"0.8024637",
"0.6739806",
"0.66919494",
"0.65730506",
"0.65545756",
"0.65379274",
"0.64491004",
"0.6409886",
"0.64058053",
"0.64058053",
"0.63524437",
"0.6298113",
"0.6255913",
"0.62427396",
"0.62427396",
"0.6221583",
"0.6201253",
"0.6180551",
"0.6160523",
"0.6151703",
"0.61091954",
"0.6102691",
"0.6102691",
"0.60076314",
"0.59924287",
"0.59924287",
"0.598664",
"0.5954894",
"0.59335315",
"0.5931358",
"0.59199655",
"0.5915179",
"0.5908267",
"0.5899358",
"0.589832",
"0.5874802",
"0.5874731",
"0.58539224",
"0.58423674",
"0.581968",
"0.57975364",
"0.5784634",
"0.5783022",
"0.5774866",
"0.5774588",
"0.5774588",
"0.57574886",
"0.5754067",
"0.5749043",
"0.5745656",
"0.57333",
"0.56957626",
"0.5695029",
"0.56897974",
"0.56894153",
"0.5687948",
"0.56840736",
"0.5674649",
"0.5673573",
"0.5673573",
"0.5673573",
"0.5673573",
"0.5673573",
"0.5661737",
"0.56444216",
"0.56444216",
"0.56200814",
"0.56200814",
"0.5617506",
"0.5609899",
"0.56060207",
"0.5605341",
"0.5599748",
"0.55964893",
"0.5594079",
"0.558882",
"0.55779743",
"0.5572559",
"0.55723906",
"0.5568511",
"0.5567635",
"0.5559782",
"0.55500644",
"0.5539336",
"0.5523737",
"0.5522579",
"0.55221534",
"0.55212885",
"0.55212885",
"0.5520294",
"0.5520294",
"0.5518933",
"0.5512033",
"0.550869",
"0.54979736",
"0.5494094",
"0.5492567",
"0.54902184",
"0.5487025",
"0.54865676",
"0.5483135"
] | 0.0 | -1 |
Expose all ANSI color names and their codes | def styles
ANSI.constants(false).each_with_object({}) do |col, acc|
acc[col.to_sym.downcase] = lookup(col)
acc
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def colors\n color_codes.keys\n end",
"def convert_ansi_to_html(data)\n COLOR_MAPPING.each do |k, v|\n data.gsub!(/\\e\\[#{k}m/, \"<span style=\\\"color:#{v}\\\">\")\n end\n return data.gsub(/\\e\\[0m/, \"</span>\")\n end",
"def ansi_color(color, text)\n #\"\\x1b[38;5;#{color}m#{text}\\x1b[0m\"\n \"\\e[38;5;#{color}m#{text}\\e[0m\"\nend",
"def bash_color_codes(string)\n string.gsub(\"\\e[0m\", '</span>').\n gsub(\"\\e[31m\", '<span class=\"color31\">').\n gsub(\"\\e[32m\", '<span class=\"color32\">').\n gsub(\"\\e[33m\", '<span class=\"color33\">').\n gsub(\"\\e[34m\", '<span class=\"color34\">').\n gsub(\"\\e[35m\", '<span class=\"color35\">').\n gsub(\"\\e[36m\", '<span class=\"color36\">').\n gsub(\"\\e[37m\", '<span class=\"color37\">')\n end",
"def red\n colorize(31)\n end",
"def colors\n @wmiiconfig.read(\"/bar/#{@name}/colors\").split(/\\s+/).map do |txt|\n txt.scan(/[a-fA-F0-9]{2}/).map{|hex| hex.to_i(16)}\n end\n end",
"def colorize!(color_code) \"#{COLORS[color_code]}#{self.to_s}\\e[0m\" ; end",
"def get_color_code\n\t\t{ r: @term_hex[0], g: @term_hex[1], b: @term_hex[2], alpha: @term_hex[-1] }\n\tend",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def red(string)\n \"\\033[0;31m#{string}\\e[0m\"\nend",
"def text_colors\n @colors = {\n red: 31,\n yellow: 33,\n green: 32\n }\n end",
"def list_colors\n color_string = \"\\nThe available colors are \"\n COLORS.each_with_index do |color, index|\n if index%2 == 0\n color_string += \"\\n\"\n end\n color_string += color + \" \"\n end\n puts color_string\n end",
"def ansi(param)\n i = case param\n when Symbol, String\n ColorfulDays.to_ansi_color_index(param.to_sym)\n when Integer\n param\n else\n raise \"unknown parameter: #{param}\"\n end\n\n color_256(i)\n end",
"def red\n colorize \"\\033[31m\"\n end",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend",
"def ansi_sequences\n ANSI_ESCAPE_SEQUENCE_RX.each_match(self).collect do |match|\n ANSI.recognize(match[0])\n end\n end",
"def palette\n [\n \"Background: #{background_color}\",\n \"Foreground: #{foreground_color}\",\n \"Bold Text : #{bold_color}\",\n \"Cursor : #{cursor_color}\",\n \"Font : #{font}\"\n ].join(\"\\n\")\n end",
"def colorize(text, color_code); \"\\e[#{color_code}m#{text}\\e[0m\"; end",
"def hsla_color; end",
"def get_color(key)\n if key.is_a? String\n color = key\n elsif Wirb::COLORS.key?(key)\n color = Wirb::COLORS[key]\n end\n\n color ? \"\\033[#{ color }m\" : ''\n end",
"def color_map\n {\n 'high_risk_ice_check_in' => '#800080',\n 'friend_in_detention' => '#e83737',\n }\n end",
"def print_colors\n 1.upto(6) { |i| print \"#{i} = \" + \"\\u2b24\".color(COLORS[i]) + \" \" }\n print \": \"\nend",
"def puts_red(str)\n puts \" \\e[00;31m#{str}\\e[00m\"\nend",
"def escape_ansi_to_html(data)\n {1 => :nothing,\n 2 => :nothing,\n 4 => :underline,\n 5 => :nothing,\n 7 => :nothing,\n 30 => :black,\n 31 => :red,\n 32 => :green,\n 33 => :yellow,\n 34 => :blue,\n 35 => :magenta,\n 36 => :cyan,\n 37 => :white,\n 39 => :bold,\n 40 => :nothing\n }.each do |key, value|\n if value != :nothing\n data.gsub!(/\\e\\[#{key}m/, \"<span class=\\\"#{value}\\\">\")\n else\n data.gsub!(/\\e\\[#{key}m/, \"<span>\")\n end\n end\n data.gsub!('[39;1m', '')\n data.gsub!(/\\e\\[0m/, '</span>')\n data\n end",
"def named\n [\"\\e[\", foreground_codes[colour], 'm'].join\n end",
"def scolora(str)\n str.to_s.\n gsub(/\\e\\[1;33m/,''). # colori 16\n gsub(/\\e\\[0m/,''). # colori 64k\n gsub(/\\e\\[38;\\d+;\\d+m/,'') # end color\nend",
"def colorize(text, color_code); \"#{color_code}#{text}\\033[0m\"; end",
"def parse_colors(s)\n\n line = \"\"\n\n s.each_char do |c|\n line.concat(@colors[c]) if @colors.has_key?(c)\n line.concat(\" \")\n end\n\n line.concat(\"\\033[0m\")\n end",
"def colorNames\n colors = [\"red\", \"orange\", \"yellow\", \"green\", \"mint\", \"navy\", \"light blue\", \"lavender\", \"plum\", \"pink\"]; \n return colors;\n end",
"def colors\n keys = []\n COLORS.each_key do |key|\n keys << key\n end\n keys\n end",
"def foreground_color(index)\n \"\\e[38;5;#{index}m\"\nend",
"def colors\n keys = []\n COLORS.each_key do | key |\n keys << key\n end\n keys\n end",
"def color_table\n [0, 1, 4, 5, 7].each do |attr|\n puts '----------------------------------------------------------------'\n puts \"ESC[#{attr};Foreground;Background\"\n 30.upto(37) do |fg|\n 40.upto(47) do |bg|\n print \"\\033[#{attr};#{fg};#{bg}m #{fg};#{bg} \"\n end\n puts \"\\033[0m\"\n end\n end\n end",
"def std_colors\n FFI::NCurses.use_default_colors\n # 2018-03-17 - changing it to ncurses defaults\n FFI::NCurses.init_pair(0, FFI::NCurses::BLACK, -1)\n FFI::NCurses.init_pair(1, FFI::NCurses::RED, -1)\n FFI::NCurses.init_pair(2, FFI::NCurses::GREEN, -1)\n FFI::NCurses.init_pair(3, FFI::NCurses::YELLOW, -1)\n FFI::NCurses.init_pair(4, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(5, FFI::NCurses::MAGENTA, -1)\n FFI::NCurses.init_pair(6, FFI::NCurses::CYAN, -1)\n FFI::NCurses.init_pair(7, FFI::NCurses::WHITE, -1)\n # ideally the rest should be done by application\n #FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, -1)\n #FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, -1)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n\n FFI::NCurses.init_pair(14, FFI::NCurses::WHITE, FFI::NCurses::CYAN)\n=begin\n FFI::NCurses.init_pair(8, FFI::NCurses::WHITE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(9, FFI::NCurses::BLUE, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(10, FFI::NCurses::BLACK, FFI::NCurses::GREEN)\n FFI::NCurses.init_pair(11, FFI::NCurses::BLACK, FFI::NCurses::YELLOW)\n FFI::NCurses.init_pair(12, FFI::NCurses::BLACK, FFI::NCurses::BLUE)\n FFI::NCurses.init_pair(13, FFI::NCurses::BLACK, FFI::NCurses::MAGENTA)\n FFI::NCurses.init_pair(14, FFI::NCurses::BLACK, FFI::NCurses::CYAN)\n FFI::NCurses.init_pair(15, FFI::NCurses::BLACK, FFI::NCurses::WHITE)\n=end\n end",
"def red(string)\n \"\\033[0;33m#{string}\\033[0m\"\nend",
"def red(string)\n \"\\033[0;33m#{string}\\033[0m\"\nend",
"def colors; end",
"def print_colors\n 1.upto(COLORS.size) { |i| print \"#{i} = \" + \" \".color(COLORS[i]) + \" \" }\n print \": \"\nend",
"def reset_colors\n @color_output = false\n\n #Term::ANSIColor.coloring = true\n c = Term::ANSIColor\n @color_app_info = c.intense_white + c.bold\n @color_app_exe = c.intense_green + c.bold\n @color_command = c.intense_yellow\n @color_description = c.intense_white\n @color_parameter = c.intense_cyan\n @color_usage = c.intense_black + c.bold\n \n @color_error_word = c.intense_black + c.bold\n @color_error_name = c.intense_red + c.bold\n @color_error_description = c.intense_white + c.bold\n \n @color_bold = c.bold\n @color_reset = c.reset\n end",
"def red(msg)\n \"\\033[31m#{msg}\\033[39m\"\nend",
"def named_colors\n @named_colors ||= PdfK::NAMED_COLORS.dup\n end",
"def red(text)\n colorize text, \"\\033[1;31m\"\n end",
"def color_codes\n {\n info: '#63C5DC',\n warning: 'warning',\n success: 'good',\n fatal: 'danger'\n }\n end",
"def raw\n parsed_colors[:string].uncolor\n end",
"def to_c\n\t\t\tif color == \"white\"\n\t\t\t\t\"\\u26aa\"\n\t\t\telsif color == \"red\"\n\t\t\t\t\"\\u26d4\"\n\t\t\telsif color == \"black\"\n\t\t\t\t\"\\u26ab\"\n\t\t\tend\n\t\tend",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def color(text)\n \"\\e[31m#{text}\\e[0m\"\n end",
"def color(color=32)\n printf \"\\033[#{color}m\"\n yield\n printf \"\\033[0m\"\nend",
"def init_colors\n $desc_color = \"#{GREEN}\" # color of description portion\n # color the title based on priority\n $p5color = \"#{BLUE}#{BOLD}\" \n $p4color = \"#{MAGENTA}\" \n $p3color = \"#{CYAN}#{BOLD}\" \n $p2color = \"#{BOLD}\"\n $p1color = \"#{YELLOW}#{ON_RED}\"\n #\n # color for only the type column\n $bugcolor = \"#{BLACK}#{ON_RED}\"\n $enhcolor = \"#{GREEN}\"\n $feacolor = \"#{CYAN}\"\n\n # color for row of started event\n $startedcolor = \"#{STANDOUT}\"\n\n cols = %x[tput colors] rescue 8\n cols = cols.to_i\n if cols >= 256\n $desc_color = \"\\x1b[38;5;236m\" # 256 colors, grey\n $p5color = \"\\x1b[38;5;57m\" # some kinda blue\n $p4color = \"\\x1b[38;5;239m\" # grey. 256 colors\n $p3color = \"\\x1b[38;5;244m\" # grey, 256 colors\n end\n end",
"def output_color(text, color=text.to_i)\r\n # Color matches: 1 - Black; 2 - White; 3 - Red; 4 - Yellow; 5 - Green; 6 - Blue; 7 - Gold\r\n colors = { 1 => 30, 2 => 36, 3 => 31, 4 => 33, 5 => 35, 6 => 34, 7 => 220 }\r\n # \\e[47m Is for the grey foreground \\e[{color} is for picking the color and \\e[0m is for resetting the terminal.\r\n \"\\e[1m\\e[47m\\e[#{colors[color]}m#{text}\\e[0m\\e[22m\"\r\n end",
"def to_color color\n \"\\x1b[#{COLOR_CODE[color]}m#{to_s}\\x1b[m\"\n end",
"def color(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\n end",
"def display_color_index\n require_color_echo_get\n\n CE.rainbow\n cnt = 134\n @padding = \" \" * 2\n\n header = \"OK, Let me check color index list... :)\"\n mes = CE.rainbow.get(@padding + \"-\" * cnt) + $/\n mes += @padding + \" \" * ((cnt - header.size)/2) + CE.rainbow.get(header) + $/\n mes += CE.rainbow.get(@padding + \"-\" * cnt) + $/\n\n mes += @padding\n 256.times do |i|\n num = i + 1\n mes += CE.fg(\"index#{num}\".intern).get(\"index#{num}\" + \" \" * (4 - num.to_s.size))\n mes += CE.bg(\"index#{num}\".intern).get(\" \" * 5)\n mes += \" \" * 3\n\n if num % 8 == 0\n mes += $/ * 2\n mes += @padding if num != 256\n end\n end\n print mes \n\n exit 0\nend",
"def method_missing symbol, *others\n if symbol.to_s !~ /\\Aansi_.*\\Z/\n return super \n end\n\n color_code = ANSI_CMDS[symbol.to_s[5..-1].to_sym]\n end_code = :reset\n if color_code.is_a?(Array)\n color_code, end_code = color_code\n end\n if end_code.is_a?(Symbol)\n end_code = ANSI_CMDS[end_code]\n end\n\n if color_code\n colorize(color_code, end_code)\n else\n raise \"Unknown ansi code\"\n end\n end",
"def colorize(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\nend",
"def possible_colors\n %w(R G B Y)\n end",
"def scan_for_colors\n scan(/\\033\\[([0-9;]+)m(.+?)\\033\\[0m|([^\\033]+)/m).map do |match|\n split_colors(match)\n end\n end",
"def scan_for_colors\n scan(/\\033\\[([0-9;]+)m(.+?)\\033\\[0m|([^\\033]+)/m).map do |match|\n split_colors(match)\n end\n end",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def colorize(text, color_code)\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def colorize(*args)\n shell.set_color(*args)\n end",
"def display_colors\n\t\tprint \"\\nColors: \"\n\t\tMastermind::COLORS.each do |color, _color_code|\n\t\t\tunless color == :blank || color == :black || color == :white\n\t\t\t\tcolor_string = color.to_s.capitalize\n\t\t\t\tprint Mastermind::color(\" #{color_string} \", color)\n\t\t\tend\n\t\tend\n\t\tputs \"\\nChoose a color with it's full name or it's first character\"\n\tend",
"def colour(name, text)\n if Pry.color\n str = Pry::Helpers::Text.send(name, text)\n unless str.start_with?(\"\\001\")\n str = \"\\001#{Pry::Helpers::Text.send name, '{text}'}\\002\".sub '{text}', \"\\002#{text}\\001\"\n end\n str\n else\n text\n end\nend",
"def nc\n Ncurses::COLOR_PAIR(@id)\n end",
"def colorize(color_code)\n \"\\e[#{color_code}m#{self}\\e[0m\"\n end",
"def colorize(color_code)\n \"\\e[#{color_code}m#{self}\\e[0m\"\n end",
"def color_codes\n {\n :black => 0, :light_black => 60,\n :red => 1, :light_red => 61,\n :green => 2, :light_green => 62,\n :yellow => 3, :light_yellow => 63,\n :blue => 4, :light_blue => 64,\n :magenta => 5, :light_magenta => 65,\n :cyan => 6, :light_cyan => 66,\n :white => 7, :light_white => 67,\n :default => 9\n }\n end",
"def green(string)\n \"\\033[0;32m#{string}\\e[0m\"\nend",
"def green(string)\n \"\\033[0;32m#{string}\\e[0m\"\nend",
"def start_color\n \"\\033[\"\n end",
"def color\n @color ||= COLORS[label.length%COLORS.length].to_sym\n end",
"def convert_man_to_ansi file\n lines = file.split \"\\n\"\n l = nil\n lines.each_with_index do |line, ix|\n # convert underlined words to yellow or one color, these are usually params\n line.gsub!(/((_\b[^ ])+)/,'\u001b[4;33m\\1\u001b[0m')\n line.gsub!(/_\b/,'')\n # convert bold words to red or one color, these are usually headers and other words\n l= line.gsub(/(([^ ]\b[^ ])+)/,'\u001b[1;31m\\1\u001b[0m').gsub(/[^ ]\b/,'').gsub(/\b/,'')\n lines[ix] = l\n end\n lines\nend",
"def rgb\n if Configuration.colour_mode == 16777216\n sprintf(\"\\e[38;2;%s;%s;%sm\", *css_to_rgb)\n\n else\n numbered\n\n end\n end",
"def colorize(str, color_code = 36)\n \"\\e[#{color_code}m#{str}\\e[0m\"\n end",
"def colorize(color_code)\n \"\\e[#{color_code};40m#{self}\\e[0m\"\n end",
"def rgb_to_ansi(red, green, blue, use_bright = false)\n color_pool = RGB_COLORS_ANSI.values\n color_pool += RGB_COLORS_ANSI_BRIGHT.values if use_bright\n\n ansi_color_rgb = color_pool.min_by{ |col| rgb_color_distance([red, green, blue],col) }\n if ansi_color = RGB_COLORS_ANSI.key(ansi_color_rgb)\n ANSI_COLORS[ansi_color]\n else\n ansi_color = RGB_COLORS_ANSI_BRIGHT.key(ansi_color_rgb)\n \"#{ANSI_COLORS[ansi_color]};1\"\n end\n end",
"def colors\n return\n end",
"def colors\n Outpost::Config.instance.colors\n end",
"def format_color(name, text)\n if Pry.color\n \"\\001#{Pry::Helpers::Text.send name, '{text}'}\\002\".sub '{text}', \"\\002#{text}\\001\"\n else\n text\n end\nend",
"def display_rainbow(colors)\n puts \"R: #{colors[4]}, O: #{colors[1]}, Y: #{[5]}, G: #{colors[3]}, B: #{colors[0]}, I: #{colors[2]}, V: #{colors[6]}\"\n puts colors\nend",
"def colour(name, text)\n if Pry.color\n \"\\001#{Pry::Helpers::Text.send name, '{text}'}\\002\".sub '{text}', \"\\002#{text}\\001\"\n else\n text\n end\nend",
"def colorNormal\n puts \"\\033[0m\"\n end",
"def simple(color_name, background = false)\n (background ? 40 : 30) + ANSI_COLORS[color_name]\n end",
"def simple(color_name, background = false)\n (background ? 40 : 30) + ANSI_COLORS[color_name]\n end",
"def simple(color_name, background = false)\n (background ? 40 : 30) + ANSI_COLORS[color_name]\n end",
"def red\n colorize(:red)\nend",
"def green\n colorize(32)\n end",
"def colorize(string, code)\n \"\\e[#{code}m#{string}\\e[0m\"\n end",
"def red(input)\n puts \"\\e[31m#{input}\\e[0m\"\nend",
"def red(input)\n puts \"\\e[31m#{input}\\e[0m\"\nend",
"def to_s\n [name, COLORS[ color ]].join(\"\\n\")\n end",
"def red(text)\n colorize(text, 31)\nend",
"def colorsPrimary\n @color_rgb_strings_primary = []\n @color_rgb.each do |rgb|\n r = rgb[\"red\"]\n g = rgb[\"green\"]\n b = rgb[\"blue\"]\n @color_rgb_strings_primary << \"rgb(#{r},#{g},#{b})\"\n end\n end",
"def available_colors_translated\n colors_translated = {}\n Calendar::Event::COLORS.each do |color, hex|\n colors_translated[t(\"colors.#{color}\")] = hex;\n end\n colors_translated\n end",
"def colors\n hex_values = @lines.map { |line| line[/#([0-9A-F]{6})/, 1] }.compact\n hex_values.map { |hex| Colour::RGB.from_html(hex) }\n end"
] | [
"0.73594534",
"0.6982657",
"0.6969286",
"0.68380195",
"0.6805915",
"0.67983305",
"0.675174",
"0.6712724",
"0.6695327",
"0.6695327",
"0.66877675",
"0.66730374",
"0.66616553",
"0.6640176",
"0.6585285",
"0.6585285",
"0.6585285",
"0.6585285",
"0.6585285",
"0.6573913",
"0.6569377",
"0.65374106",
"0.6521003",
"0.6501264",
"0.6484984",
"0.647838",
"0.6462809",
"0.64592874",
"0.6425444",
"0.6423631",
"0.64215773",
"0.64121693",
"0.63924384",
"0.6389802",
"0.63872534",
"0.6386661",
"0.6374567",
"0.6373757",
"0.6362667",
"0.6362667",
"0.63411033",
"0.6326767",
"0.63197464",
"0.6315445",
"0.6313791",
"0.6305411",
"0.6303588",
"0.63035643",
"0.6303251",
"0.6299881",
"0.6299875",
"0.62939453",
"0.6287098",
"0.62787056",
"0.62762475",
"0.62740463",
"0.6270344",
"0.62624377",
"0.6258497",
"0.62567145",
"0.62523127",
"0.62523127",
"0.6249957",
"0.6249957",
"0.6249957",
"0.62335443",
"0.6228061",
"0.6223299",
"0.62223345",
"0.6220979",
"0.6220979",
"0.62140137",
"0.6212481",
"0.6212481",
"0.6203941",
"0.6200311",
"0.61982167",
"0.61928767",
"0.6170212",
"0.6169006",
"0.6161885",
"0.61586505",
"0.61461484",
"0.61440724",
"0.61401457",
"0.61266744",
"0.6113843",
"0.61104435",
"0.61104435",
"0.61104435",
"0.6108996",
"0.61032337",
"0.60994005",
"0.6084995",
"0.6084995",
"0.6083359",
"0.607762",
"0.6074548",
"0.60606015",
"0.60587484"
] | 0.6631542 | 14 |
List all available style names | def style_names
styles.keys
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def styles\n mentos(:get_all_styles)\n end",
"def list_styles(**opt)\n # Overridden by the subclass if configured for search analysis.\n end",
"def get_styles &block\n response = self.search :styles => nil\n doc = Nokogiri::XML(response)\n styles = doc.xpath(Style.root_xpath).collect{|l| l.text.to_s }\n list Style, styles, {}, &block\n end",
"def style_types\n %w( primary success info )\n end",
"def stylist\n\n end",
"def styles\n return @metadata[:styles]\n end",
"def styles\n @styles ||= DEFAULT_STYLES\n end",
"def getStyleName\n styleNameHelper(MODE_GET)\n end",
"def styles\n ANSI.constants(false).each_with_object({}) do |col, acc|\n acc[col.to_sym.downcase] = lookup(col)\n acc\n end\n end",
"def printGetSupportedStyles()\n printDebugMessage('printGetSupportedStyles', 'Begin', 1)\n paramsList = soapGetSupportedStyles()\n paramsList.each { |param|\n puts param\n }\n printDebugMessage('printGetSupportedStyles', 'End', 1)\n end",
"def styles\n return if @styles.empty?\n @styles.uniq.sort\n end",
"def styles\n a = []\n a << super\n # Header Styles\n a << {name: 'lt-gray', bg_color: \"A9A9A9\"}\n a << { name: 'gray', bg_color: \"808080\"}\n a << { name: 'two-decimal-places', format_code: \"#,##0.00\" }\n a.flatten\n end",
"def styles; end",
"def styles; end",
"def styles; end",
"def all_styles(include_hidden = false)\n doc = Scraping.noko_doc(URI.join(BASE_URL, '/beerstyles/'))\n root = doc.at_css('div.container-fluid')\n\n categories = root.css('h3').map(&:text)\n style_node = root.css('.styleGroup')\n\n styles = style_node.flat_map.with_index do |list, i|\n list.css('a').map do |x|\n category = categories[i]\n Style.new(x['href'].split('/').last.to_i, name: x.text).tap do |s|\n s.category = category\n end\n end\n end\n if include_hidden\n styles + hidden_styles\n else\n styles\n end\n end",
"def printGetFormatStyles(dbName, formatName)\n printDebugMessage('printGetFormatStyles', 'Begin', 1)\n formatInfo = getFormatInfo(dbName, formatName)\n if formatInfo != nil\n formatInfo.each_element('styleInfoList/styleInfo') { |styleInfo|\n puts styleInfo.elements['name'].text\n }\n end\n printDebugMessage('printGetFormatStyles', 'End', 1)\n end",
"def styles\n @styles ||= Hash.new do |_styles, stylename|\n _styles[stylename] = Style.new\n _styles[stylename].stylename = stylename\n _styles[stylename].stylesheet = self\n _styles[stylename]\n end\n end",
"def retrieve(name)\n self.all.detect{|style| style.name == name}\n end",
"def style_files\n styles.map do |name|\n next name if name.include?('/')\n style_path\n .reject { |p| p.strip.empty? }\n .map { |p| File.join(p, \"#{name}.css\") }\n .detect { |file| File.exist?(file) }\n end\n end",
"def styles=(styles)\n @styles = (%w[man] + styles).uniq\n end",
"def predefined_styles\r\n @predefined_styles ||=\r\n begin\r\n tmp = {}\r\n styles do |s|\r\n tmp = {\r\n bold: s.add_style(b: true, alignment: { vertical: :top }),\r\n date: s.add_style(format_code: 'mm/dd/yyyy', alignment: { vertical: :top }),\r\n float: s.add_style(format_code: '#,##0.00', alignment: { vertical: :top }),\r\n integer: s.add_style(format_code: '#,##0', alignment: { vertical: :top }),\r\n percent: s.add_style(num_fmt: 9, alignment: { vertical: :top }),\r\n currency: s.add_style(num_fmt: 7, alignment: { vertical: :top }),\r\n text: s.add_style(format_code: '@', alignment: { vertical: :top }),\r\n wrapped: s.add_style(alignment: { wrap_text: true, vertical: :top }),\r\n normal: s.add_style(alignment: { vertical: :top })\r\n }\r\n end\r\n tmp\r\n end\r\n end",
"def css_styles\n @css_styles ||= []\n end",
"def inline_styles\n parser.css(\"style\").to_a\n end",
"def available_citation_styles (id = 'all')\n @citation_style_settings.fetch('AvailableStyles',{}).select{|item| item['Id'] == id || id == 'all'}\n end",
"def report_styles(size = 12)\n @org_name_style ||= style.add_style b: true,\n sz: size,\n alignment: { horizontal: :left, wrap_text: true }\n end",
"def styles\n @document.styles\n end",
"def rated_styles\n beers.map{|b| b.style}.uniq\n end",
"def styles\n [\n {:name => 'general', :num_fmt => 0},\n {:name => 'currency', :num_fmt => 5},\n {:name => 'percent', :num_fmt => 9},\n {:name => 'date', :format_code => \"yyyy-mm-dd\"},\n {:name => 'text_left', :alignment => { :horizontal => :left, :vertical => :center , :wrap_text => false}},\n {:name => 'text_center', :alignment => { :horizontal => :center, :vertical => :center , :wrap_text => false}},\n {:name => 'text_right', :alignment => { :horizontal => :right, :vertical => :center , :wrap_text => false}}\n ]\n end",
"def get_basic_style\n (\"#{role} #{$aod_current_cell_style[$aod_tl]} #{$aod_current_list_item_style[$aod_ll]}\").strip\n end",
"def admin_edit_layout_style_list\n %w( tt_edit_layout_style_list@booktrope.com )\n end",
"def styles\n @styles ||= Hash.new{ |h, k| h[k] = {} }\n end",
"def soapGetSupportedStyles()\n printDebugMessage('soapGetSupportedStyles', 'Begin', 1)\n soapConnect\n res = @soap.getSupportedStyles({})\n if(2 <= @debugLevel)\n p res\n end\n printDebugMessage('soapGetSupportedStyles', 'End', 1)\n return res['getSupportedStylesReturn']\n end",
"def css_styles\n end",
"def index\n @styles = Style.all\n end",
"def index\n @stylists = Stylist.all\n end",
"def styles=(_arg0); end",
"def detected_styles; end",
"def inspect\n \"Teacup::Stylesheet[#{name.inspect}] = #{@styles.inspect}\"\n end",
"def page_style(name)\n content_for(:page_style) { name }\n end",
"def index\n @cooking_styles = CookingStyle.all\n end",
"def style\n @style\n end",
"def stamp_styles\n x = []\n style.each_pair do |key, value|\n x << \"#{key}=\\\"#{value}\\\"\"\n end\n x.join ' '\n end",
"def stylesheets\n styles.zip(style_files).map do |name, path|\n base = File.basename(path, '.css')\n raise \"style not found: #{style.inspect}\" if path.nil?\n {\n name: name,\n path: path,\n base: File.basename(path, '.css'),\n media: base =~ /(print|screen)$/ ? $1 : 'all'\n }\n end\n end",
"def inspect\n \"#{self.class.name}[#{name.inspect}] = #{styles.inspect}\"\n end",
"def style\n fetch('vehicle.styles')\n end",
"def show_style\n :short\n end",
"def style_types\n @xml.xpath('/styleSheet/cellXfs/xf').map do |xstyle|\n style_type_by_num_fmt_id(\n xstyle.attributes['numFmtId']&.value\n )\n end\n end",
"def list_colors\n color_string = \"\\nThe available colors are \"\n COLORS.each_with_index do |color, index|\n if index%2 == 0\n color_string += \"\\n\"\n end\n color_string += color + \" \"\n end\n puts color_string\n end",
"def starter_stylesheet\n {\n :header => [:bold, :yellow],\n :add => [:bold, :green],\n :remove => [:bold, :red],\n :range => [:bold, :magenta],\n :trailing_whitespace => [:background, :red]\n }\n end",
"def colorNames\n colors = [\"red\", \"orange\", \"yellow\", \"green\", \"mint\", \"navy\", \"light blue\", \"lavender\", \"plum\", \"pink\"]; \n return colors;\n end",
"def theme_css(name)\n \"themes/#{name}/style\"\n end",
"def collect_styles(*styles)\n @options = {}\n styles.each do |style|\n @options.update(styles.delete(style)) if style.is_a?(Hash)\n end\n \n # If no styles provided, we adapt all styles.\n styles = styles.empty? ? @style_list.clone : styles\n \n # except = styles.delete(:except)\n except = @options[:except]\n except = [except] if (except && !except.is_a?(Enumerable))\n except.collect! do |item| item.is_a?(String) ? item.to_sym : item end if !except.nil?\n # only = styles.delete(:only)\n only = @options[:only]\n only = [only] if (!only.nil? && !only.is_a?(Enumerable))\n only.collect! do |item| item.is_a?(String) ? item.to_sym : item end if !only.nil?\n\n # Only defaults to all styles if not defined.\n if !only.nil? then\n # Remove styles to display if not in only group\n styles.delete_if do |item| !only.include?(item) end\n end\n # Remove excepted styles\n if !except.nil? then\n styles.delete_if do |item| except.include?(item) end\n end\n\n styles\n end",
"def styles\n yield @styles if block_given?\n @styles\n end",
"def style\n return \"\" if self.class.styles_pool.blank?\n\n common_style = self.class.styles_pool[:_common] || []\n common_style_without_fill_window = common_style - [FILL_WINDOW_CSS]\n if screen = params.try(:[], :screen)\n screen_style = self.class.styles_pool[screen] || []\n (screen_style.blank? ? common_style : (common_style_without_fill_window + screen_style)).uniq.join(\" \")\n else\n common_style.join(\" \")\n end\n end",
"def fonts_list\n initiate_library\n FONTS_LIBRARY.keys\n end",
"def getStyleString\n\t\tstyle=\"\"\n\t\t(1..@StyleLtarr.length).each do |i|\n\t\t\tstyle=style+@StylePrefix+(i).to_s+\" lt \"+@StyleLtarr[i-1]+\" lw \"+@oc[\"LineWidth\"]\n\t\t\tstyle=style+\" pt \"+@StylePtarr[i-1]+\" ps 0.5;\\n\"\n\t\tend\n\t\tstyle=style+\"set style line 20 lt 7 lw 1 pt 4 ps 0.5;\\n\"\n\t\treturn style\n\tend",
"def getStyledNamespaceName(nsName)\r\n return CodeNameStyling.getStyled(nsName, @langProfile.classNameStyle)\r\n end",
"def style(options = {})\n Kamelopard::Style.new options\n end",
"def [](type)\n (@styles_by_type ||= {})[type.to_sym] ||= []\n end",
"def getStyle(filename)\n DEFAULT_FILE_TYPES.invert[File.extname(filename)]\n end",
"def style; end",
"def style; end",
"def style; end",
"def lookup(name)\n @@loaded_styles\n .find { |style| style.name.to_sym == name }\n .tap { |style| raise InvalidFrameStyleName, name if style.nil? }\n end",
"def print_stylists_with_index(user)\n all_stylists = Stylist.all\n all_stylists.each_with_index do |stylist, index|\n i = index+1\n puts i.to_s + \" \" + stylist.name\n end\n end",
"def style(options); end",
"def find_css\n @dom.css(STYLE_ELEMENT_QUERY).map { |element| read_stylesheet(element) }.compact\n end",
"def stylesheets\n sheets = []\n sheets << settings.stylesheets.keys if settings.respond_to?('stylesheets')\n sheets << @styles if @styles\n sheets.flatten.uniq.map do |css|\n \"<link href='#{path_to_styles css }' rel='stylesheet' type='text/css'>\"\n end.join\n end",
"def font_style(style=nil)\n cur_page.font_style(style)\n end",
"def style\n end",
"def stylesheets\n @parts.values.map(&:stylesheet).compact\n end",
"def calendar_date_select_stylesheets(options = {})\n return [] if @cds_stylesheets_loaded\n\n @cds_stylesheets_loaded = true\n\n options.assert_valid_keys(:style)\n \"calendar_date_select/#{options[:style] || \"default\"}\"\n end",
"def start_style(name)\n case name\n when \"negative\"\n \"<span style=\\\"color: white; background-color: black\\\">\"\n when /on_(\\w+)/\n colval = color_value($1)\n \"<span style=\\\"background-color: #{colval}\\\">\"\n else\n colval = color_value(name)\n \"<span style=\\\"color: #{colval}\\\">\"\n end\n end",
"def defined_style_conversions; end",
"def css_name\n my_color = self.to_i\n css_name = nil\n Symbol.css_colors.each do |color, hex|\n if hex == my_color\n css_name = color\n break\n end\n end\n return css_name\n end",
"def correct_style_detected; end",
"def correct_style_detected; end",
"def correct_style_detected; end",
"def css_class_names\n @css_class_names ||= []\n end",
"def allows_style?\n true\n end",
"def style\n defined?(@style) ? @style : 0\n end",
"def getStyledPathName(pathName)\r\n return CodeNameStyling.getStyled(pathName, @langProfile.fileNameStyle)\r\n end",
"def hidden_styles\n hidden_ids = [40, 41, 57, 59, 66, 67, 68, 69, 70,\n 75, 83, 99, 104, 106, 116, 119, 120]\n hidden_ids.map do |id|\n Style.new(id)\n end\n end",
"def exist?(name)\n self.all.any?{|style| style.name == name}\n end",
"def style\n return @style\n end",
"def style(s, style); color(s, *Styles[style]) end",
"def get_color_list_name\n\t\t@availible_colors.keys[0]\n\tend",
"def index\n @class_styles = ClassStyle.all\n end",
"def default_style\n return 0\n end",
"def method_missing(stylename, &styles)\n properties = Limelight.new(&styles).styles\n style(stylename, properties)\n end",
"def optns_list\n @optns_list ||= Select2BikeBinder.configuration.color_option_keys.map do |k| \n [ColorNameI18n::Color.new(k).name.capitalize, k]\n end\n @optns_list \n end",
"def options_for_style(style_key)\n if style = self.class.styles[style_key]\n style\n else\n raise ArgumentError, \"Breadcrumbs style #{style_key.inspect} not found. Use any of #{self.class.styles.keys.inspect}.\"\n end\n end",
"def index\n @beer_styles = BeerStyle.all\n end",
"def index\r\n @grooming_styles = GroomingStyle.all\r\n end",
"def index\n @style_sheets = @site.style_sheets.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @style_sheets }\n end\n end",
"def style_guide\n @props = {styles: helpers.generate_style_array}\n end",
"def all_themes\n Dir.chdir(wasko_directory) do |path|\n Dir[\"*.color\"].map do |filename|\n filename.gsub(/\\.color$/, \"\")\n end\n end\n end",
"def index()\n @styles = Style.reorder(style_order_by(\"styles\")).paginate(page: params[:page], per_page: 4)\n @title = \"All Styles\" \n @type = \"Style\"\n end",
"def implicit_headings\n @implicit_headings ||= begin\n headings = []\n doc.css(\"[style]\").each do |element|\n headings.push element unless element.font_size.nil? || element.font_size < MIN_HEADING_SIZE\n end\n headings\n end\n end"
] | [
"0.7380433",
"0.7204228",
"0.67233413",
"0.67182636",
"0.6682742",
"0.6538333",
"0.6526848",
"0.65208",
"0.6436264",
"0.63944024",
"0.63812304",
"0.6332522",
"0.61585027",
"0.61585027",
"0.61585027",
"0.6083059",
"0.6081906",
"0.60627264",
"0.6058523",
"0.6042654",
"0.601944",
"0.60140353",
"0.6012071",
"0.6003429",
"0.5941052",
"0.5913613",
"0.5909524",
"0.58914536",
"0.5854669",
"0.5828134",
"0.5821066",
"0.58088076",
"0.58055866",
"0.57547784",
"0.57164013",
"0.57057697",
"0.5697042",
"0.56889015",
"0.56846887",
"0.5673147",
"0.56721336",
"0.56680447",
"0.5634391",
"0.5630871",
"0.5627943",
"0.5627405",
"0.56263894",
"0.56216615",
"0.56176573",
"0.55947566",
"0.5584178",
"0.5583726",
"0.5574171",
"0.55704117",
"0.55499846",
"0.55110854",
"0.54903287",
"0.54714966",
"0.54652923",
"0.5460484",
"0.546013",
"0.5445612",
"0.5445612",
"0.5445612",
"0.5442657",
"0.5434521",
"0.54332864",
"0.5426999",
"0.5418868",
"0.54175943",
"0.5416605",
"0.5376655",
"0.5371179",
"0.53702724",
"0.53606844",
"0.5335624",
"0.53340584",
"0.53340584",
"0.53340584",
"0.53323734",
"0.53237057",
"0.5322725",
"0.5320368",
"0.53198624",
"0.53147995",
"0.53100103",
"0.5308422",
"0.53083885",
"0.53008",
"0.52994365",
"0.52975875",
"0.52910686",
"0.5285798",
"0.5281004",
"0.5263218",
"0.5260206",
"0.52512807",
"0.52499807",
"0.5242178",
"0.523861"
] | 0.83735865 | 0 |
Check if provided colors are known colors | def valid?(*colors)
colors.all? { |color| style_names.include?(color.to_sym) }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_valid_color?(color_string)\n COLORS.include?(color_string)\n end",
"def colors?\n scan(/%([a-z]+)/).each do\n return true if Regexp.last_match(1).validate_color\n end\n false\n end",
"def color_valid\n color == \"blue\" || color == \"green\"\n end",
"def color_valid?(_clr)\n true\n end",
"def color_valid?(_clr)\n true\n end",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\n end",
"def color_valid(color)\n %w[blue green].any? { |c| c == color }\n #color == \"blue\" || color == \"green\"\nend",
"def colorized?\n scan_for_colors.inject([]) do |colors, match|\n colors << match.tap(&:pop)\n end.flatten.compact.any?\n end",
"def colorized?\n scan_for_colors.inject([]) do |colors, match|\n colors << match.tap(&:pop)\n end.flatten.compact.any?\n end",
"def contains_color?\n self[COLOR_REGEXP]\n end",
"def much_green?(colors) ; colors[1] > 200 ; end",
"def valid_input?(colors)\n valid_colors = %w(r b g o y w)\n if colors.length == 4 && valid_colors?(colors)\n true\n else\n puts \"!!!Invalid format and/or Invalid Colors - colors not added!!!\"\n end\n end",
"def color_valid(color)\r\n color == \"blue\" || color == \"green\"\r\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def color_valid\n color == \"blue\" || color == \"green\"\nend",
"def color_valid1(color)\n if color == \"blue\" || color == \"green\"\n true\n else\n false\n end\nend",
"def valid?(color)\n color.is_a?(Color) || # color object\n NAMED_COLORS.key?(color) || # keyword\n hex?(color) || # hexadecimal value\n ( # Array of Floats from 0.0..1.0\n color.instance_of?(Array) &&\n color.length == 4 &&\n color.all? { |el| el.is_a?(Numeric) }\n )\n end",
"def isColor(c)\n if c == \"r\" or c == \"b\" or c == \"g\" or c == \"y\" or c == \"c\" or c == \"m\" then\n return true\n else\n return false\n end\nend",
"def color_valid_test(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\"\nend",
"def valid_color?(col)\n return false unless col[0] == '#'\n\n col[0] = ''\n color = col.chars\n valid = true\n color.each { |char| valid = false unless [*('a'..'f'), *('0'..'9')].include?(char) }\n valid\n end",
"def color_valid3(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid(color)\n (color == 'blue' || color == 'green') ? true : false\nend",
"def check_inside_Colors (string)\n Colors.include?(string)\n end",
"def color_valid(color)\n color == 'blue' || color == 'green' ? true : false\nend",
"def color_valid(color)\n color == \"blue\" || color == \"green\" ? true : false\nend",
"def color_valid(color)\n return true if color == \"blue\" || color == \"green\"\nend",
"def color_valid_simple(color)\n color == \"blue\" || color == \"green\"\nend",
"def color_valid2(color)\n color == \"blue\" || color == \"green\"\nend",
"def has_colors?(data)\n data.match(/\\x1B\\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]/)\n end",
"def in_check?(color)\n end",
"def correct_color?(array)\n @correct_colors = array & $code_strings\n end",
"def validate_color(clr)\n return true unless clr\n raise UserError, invalid_color_msg(clr) unless color_valid?(clr)\n end",
"def validate_color(clr)\n return true unless clr\n raise UserError, invalid_color_msg(clr) unless color_valid?(clr)\n end",
"def color?\n !@color.nil?\n end",
"def scan_for_colors; end",
"def validate_colors\n super\n data.each do |d|\n if d.is_a?(Hash) and c = d[:color]\n validate_color(c)\n end\n end\n end",
"def validate_eye_color(color)\n\t\t[\"amb\", \"blu\", \"brn\", \"gry\", \"grn\", \"hzl\", \"oth\"].include?(color)\n\tend",
"def check_color\n state = read_state\n return state[:color] != COLOR_OFF\n end",
"def color?(color)\n match color,\n on(Black | Grey.(-> v { v < 0.2 }), 'black-ish'),\n on(White | Grey.(-> v { v > 0.8 }), 'white-ish'),\n on(Grey.(-> v { v >= 0.2 }) & Grey.(-> v { v <= 0.8 }), 'grey-ish'),\n on(Pink, \"that's not a color ;)\")\nend",
"def pixel_is_colour?( x, y, colours, fuzz = 0 )\n colours = [colours] unless colours.is_a?( Array ) && colours.first.is_a?( Array )\n colour_of_pixel = get_pixel_colour( x, y )\n\n colours.each do |colour|\n return true if self.class.compare_colours?( colour, colour_of_pixel, fuzz )\n end\n\n return false\n end",
"def rgb?(color_str)\n !!(color_str.to_s.downcase.gsub(/\\s+/, '') =~ RGB_REGEX)\n end",
"def color_valid(color)\n\tcolor == \"blue\" || color = \"green\"\nend",
"def color_vavid(color)\n color == \"blue\" || color == \"green\"\nend",
"def opaque?\n all? { |color| Color.opaque?(color) }\n end",
"def use_color?\n use_color\n end",
"def hex?(color_str)\n !!(color_str.to_s.downcase.strip =~ HEX_REGEX)\n end",
"def possible_colors\n %w(R G B Y)\n end",
"def hex?(color_string)\n # MRuby doesn't support regex, otherwise we'd do:\n # !(/^#[0-9A-F]{6}$/i.match(a).nil?)\n color_string.instance_of?(String) &&\n color_string[0] == '#' &&\n color_string.length == 7\n end",
"def colorized?\n !defined?(@uncolorized).nil?\n end",
"def colorized?\n !defined?(@uncolorized).nil?\n end",
"def validate_colors\n super\n validate_color(foreground) if foreground\n end",
"def color?\n false\n end",
"def check? color\n pieces = select_all_pieces_of(other_color(color))\n king_position = find_king_position(color)\n\n pieces.any? do |piece|\n piece.possible_moves.any? do |move|\n movement_helper?(piece.position, move, piece.color) && move == king_position\n end\n end\n end",
"def color?\n @color\n end",
"def valid_chars?(guess)\n if !guess.upcase.each_char.all? { |char| @colors.has_value? char }\n puts \"That is not a valid guess. Please guess from these colors: #{@colors.values.join}\"\n false\n else\n true\n end \n end",
"def black_and_white?\n entries == [ChunkyPNG::Color::BLACK, ChunkyPNG::Color::WHITE]\n end",
"def ==(*color)\n color = (color[0].is_a? Sketchup::Color)? color[0].to_a : color.to_a.flatten\n return self.to_a == color\n end",
"def red?\n not black?\n end",
"def correct_color_input?(input_ary, ref_ary)\n (input_ary - ref_ary).empty?\n end",
"def ==(a_colour)\n values == a_colour.values\n end",
"def validate_colour(input)\n if VALID_COLOUR_LIST.include?(input.to_s.upcase)\n input.downcase.to_sym\n else \n false\n end\n end",
"def supports_rgb_color?\n true\n end",
"def colour?\n self.size == 1 && ('A'..'Z').include?(self)\n end",
"def input_correct?(guess)\n guess.all? {|i| @color_array.any?{|x| x==i}}\nend",
"def ==(other)\n val = Color.parse(other)\n return false if val.nil?\n r == val.r && g == val.g && b == val.b && a == val.a\n end",
"def rgba?(color_str)\n !!(color_str.to_s.downcase.gsub(/\\s+/, '') =~ RGBA_REGEX)\n end",
"def validate_color(color)\n if (color<0) \n return 0\n elsif (color>255)\n return 255\n else\n return color\n end\n end",
"def check?(color)\n king(color).in_check?\n end",
"def compareColor(c1, c2, c3)\n\tif c1.color == c2.color and c1.color == c3.color\n\t\treturn true\n\telsif c1.color != c2.color and c1.color != c3.color and c2.color != c3.color\n\t\treturn true\n\telse\n\t\treturn false\n\tend\nend",
"def legal_from_2?(from, color)\n occupied_spaces(color).include?(from)\n end",
"def different_color?(other_suit)\n (black? && other_suit.red?) || (red? && other_suit.black?)\n end",
"def rgb?\n colorspace == \"rgb\"\n end"
] | [
"0.8056831",
"0.7976372",
"0.78964174",
"0.78522",
"0.78522",
"0.7704579",
"0.76846343",
"0.7656823",
"0.7656823",
"0.76385766",
"0.75507593",
"0.7525427",
"0.7422714",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.739179",
"0.7382212",
"0.7373704",
"0.7362441",
"0.7339612",
"0.7330598",
"0.7328764",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.73286843",
"0.7322848",
"0.73112106",
"0.73028505",
"0.730209",
"0.7250024",
"0.72243625",
"0.7221084",
"0.72166675",
"0.7168159",
"0.7152976",
"0.7121192",
"0.7073148",
"0.6989791",
"0.6989791",
"0.6989581",
"0.69780976",
"0.69583464",
"0.6904858",
"0.68864363",
"0.68622684",
"0.6794602",
"0.6712996",
"0.6708918",
"0.6692025",
"0.6689742",
"0.66656375",
"0.6657649",
"0.66510534",
"0.65880483",
"0.6579427",
"0.6579427",
"0.65765643",
"0.6567748",
"0.65052205",
"0.6504461",
"0.6495694",
"0.64846873",
"0.647849",
"0.64727587",
"0.6459502",
"0.64399123",
"0.6424126",
"0.63666695",
"0.63461876",
"0.63338953",
"0.63247746",
"0.6322249",
"0.6305635",
"0.6301255",
"0.62998",
"0.62880504",
"0.62686974",
"0.62549746"
] | 0.76292187 | 10 |
[ [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], [X,X,X,X,X,X,X,X,X,X], ] | def length
grid_shape.length
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def iterate(arr)\n # TODO 3\n arrNew = []\n (0...arr.count).each do |x|\n arrNew[x] = []\n (0...arr.count).each do |y|\n if num_neighbors(x, y, arr) == 3\n arrNew[x][y] = \"X\"\n else\n arrNew[x][y] = \".\"\n end\n end\n end\n\n arrNew\nend",
"def input4\n [\n [1, 0, 0, 0, 1],\n [0, 0, 0, 1, 3],\n [0, 1, 0, 0, 5],\n [0, 0, 1, 0, 8]\n ]\nend",
"def input1\n [\n [1, 0, 0, 0, 1],\n [0, 1, 0, 0, 5],\n [0, 0, 1, 0, 4],\n [0, 0, 0, 1, 3]\n ]\nend",
"def print_fine_points(points,size)\n grid = []\n for i in 0..size-1\n grid.push([])\n for j in 0..size-1\n if points.include? [j,i]\n grid.last.push(\"x\")\n else\n grid.last.push(\".\")\n end\n end\n end\n grid.reverse!\n grid.map {|x| puts x.join(\"\")}\nend",
"def print_points(points,size)\n grid = []\n for i in 0..size-1\n grid.push([])\n for j in 0..size-1\n if points.include? [j,i]\n grid.last.push(\"x\")\n else\n grid.last.push(\".\")\n end\n end\n end\n grid.reverse!\n grid.map {|x| puts x.join(\" \")}\nend",
"def create_grid(board_array)\n\t# split into triplets\n\ttriplets_array = []\n board_array.each { |array| triplets_array << array.each_slice(3).to_a }\n\n # shuffle the triplets\n shuffle_container = [[], [], []]\n 9.times do |row|\n 3.times do |column|\n current_array = triplets_array[row][column]\n shuffle_container[column] << current_array\n end\n end\n\n # flatten and re-split\n final_array = []\n shuffle_container.flatten.each_slice(9) { |array| final_array << array }\n\n return final_array\nend",
"def coordinate_list\n @board.each_index.inject([]) do |result,row_index|\n result.concat( \n @board[row_index].each_index.collect do |column_index|\n [row_index + 1, column_index + 1]\n end\n )\n end\n end",
"def square_arrays\n squares = []\n org_row = 0\n org_col = 0\n (0..8).each do |i|\n squares[i] = []\n (org_row..org_row+2).each do |k|\n (org_col..org_col+2).each do |j|\n squares[i].push(@board[k][j])\n end \n end\n if org_row == 6\n org_col += 3 \n org_row = 0\n else\n org_row += 3 \n end\n end \n squares\n end",
"def print_grid(points,size)\n grid = []\n for i in 0..size-1\n grid.push([])\n for j in 0..size-1\n if points.include? [j,i]\n # grid.last.push(\"#{j.to_s.reverse[0]}#{i.to_s.reverse[0]}\")\n grid.last.push(\"#{j}#{i}\")\n else\n grid.last.push(\"__\")\n end\n end\n end\n grid.reverse!\n grid.map {|x| puts x.join(\" \")}\nend",
"def generate_grid(board_string) current_grid = Array.new(3, Array.new(3) {\n[\"-\", \"X\"].sample })\n\n parse_char = board_string.split('')\n parse_count = 0\n\n test = current_grid.length\n\n 3.times do |row|\n 3.times do |idx|\n current_grid[row][idx] = parse_char[parse_count]\n parse_count += 1\n end\n end\n\n return current_grid\nend",
"def generate_matrix\n [].tap { |a| 25.times { a << 'X' } }.each_slice(5).to_a\n end",
"def input2\n [\n [3, 2,-4, 3],\n [2, 3, 3, 15],\n [5, -3, 1, 14]\n ]\nend",
"def initialize\n @array = []\n x,y = 0,0\n while x < 8\n while y < 8\n @array << [x,y]\n y += 1\n end\n x += 1\n y = 0\n end\n @array\n end",
"def x_points\n points = [[], []]\n (0...height).each do |y|\n (0...width).each do |x|\n if (array[y][x]).nonzero? && (x - 1 < 0 || (array[y][x - 1]).zero?)\n points[0] << Point.new(x - 1, y) + @position\n end\n\n if (array[y][x]).nonzero? && (x + 1 >= length || (array[y][x + 1]).zero?)\n points[1] << Point.new(x + 1, y) + @position\n end\n end\n end\n\n points\n end",
"def visualize_array\n(0...ROWS).each { |row|\n (0...COLS).each { |col|\n print row.to_s + \" \" + col.to_s + \" \"\n print \"(\" + (COLS*row + col).to_s + \") \" # index into board array\n }\n puts\n}\nend",
"def initialize\n flat_array = Array.new(9){ ' ' }\n # it will be easier to put X's and O's on the board\n # if I let @spaces be an array of arrays.\n @spaces = flat_array.each_slice(3).to_a\n # that was the old #rows method. \n end",
"def board_2d\n # slice the array into groups of 4 to create 2d-array\n @board.enum_slice(4).to_a\n end",
"def vertices\n vertices = []\n\n vertices << [(@origin[0] + @l/2.0), (@origin[1] + @h/2.0), (@origin[2] + @w/2.0)]\n vertices << [(@origin[0] + @l/2.0), (@origin[1] + @h/2.0), (@origin[2] - @w/2.0)]\n vertices << [(@origin[0] + @l/2.0), (@origin[1] - @h/2.0), (@origin[2] - @w/2.0)]\n vertices << [(@origin[0] + @l/2.0), (@origin[1] - @h/2.0), (@origin[2] + @w/2.0)]\n vertices << [(@origin[0] - @l/2.0), (@origin[1] - @h/2.0), (@origin[2] + @w/2.0)]\n vertices << [(@origin[0] - @l/2.0), (@origin[1] - @h/2.0), (@origin[2] - @w/2.0)]\n vertices << [(@origin[0] - @l/2.0), (@origin[1] + @h/2.0), (@origin[2] - @w/2.0)]\n vertices << [(@origin[0] - @l/2.0), (@origin[1] + @h/2.0), (@origin[2] + @w/2.0)]\n\n vertices\n end",
"def build_sample\n sample = Array.new(8) {Array.new(8)}\n sample[0][0] = King.new(0, 0, 0)\n sample[7][7] = King.new(7, 7, 1)\n sample[0][4] = Rook.new(0, 4, 1)\n sample[4][0] = Rook.new(4, 0, 1)\n sample[4][4] = Bishop.new(4, 4, 1)\n sample\nend",
"def test_with_irregular_array3D2; show([[[0,0,0]],\n [[0,0,0],[1,1,1]]]) end",
"def generateBoard()\n board = []\n\n (1..@board.rows).each do |r|\n row = []\n (1..@board.cols).each { |c| row.push('X') }\n board.push(row)\n end\n\n board\n end",
"def represent\n # grid_array will be an array of strings that represent our grid in NxN format\n grid_array=[]\n @h.times{|r| grid_array<<@grid[r*@w,@w].split('')*' '}\n grid_array\n end",
"def generate_coordinates\n coordinates = []\n (0..@column_count - 1).to_a.each do |i|\n (0..@row_count - 1).to_a.each do |j|\n coordinates << {x: i, y: j, z: 0}\n end\n end\n coordinates\n end",
"def print_coordinates\n\t\tsample_board=[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]\n\t\t(0..(sample_board.length)-1).each do |i| #we are assigning the count of each row of the array sample_board \n\t\t\t#(0,1,2,3) to the variable i\n\t\t\t(0..(sample_board[i].length)-1).each do |j| #we are assigning the count of each column of the array sample_board\n\t\t\t\t#(0,1,2,3) to the variable j\n\t\t\t\tif(sample_board[i][j]<10) #adds extra space after single digit numbers to make them the same width as double digits\n\t\t\t\t\tmarker=sample_board[i][j].to_s + \" \" #.to_s converts the output of sample_board[i][j] to a string\n\t\t\t\telse\n\t\t\t\t\tmarker=sample_board[i][j]\n\t\t\t\tend\n\t\t\t\tprint \"#{marker}\"\n\t\t\t\tprint \"|\" unless j==sample_board.length-1 #prints \"|\" after each element in the \"secondary\" arrays (i.e. rows) \n\t\t\t\t#except after the last element in each \"secondary\" array (i.e. each row). \n\t\t\tend\n\t\t\tputs \"\"\n\t\tend\n\tend",
"def nest_array(board)\n nested_board = []\n 9.times { nested_board << board.slice!(0..8)}\n return nested_board\nend",
"def test_with_irregular_array3D4; show([[[0,1,0.5,0.7]]]) end",
"def test_with_irregular_array3D1; show([[[0,0,0],[1,1,1]],\n [[0,0,0]]]) end",
"def cavity grid\n # copy_g = grid\n\n # # size = copy_g[0].length\n\n # copy_g.delete_at(size-1)\n # copy_g.delete_at(0)\n\n # copy_g[0].delete_at(size-1)\n # copy_g[0].delete_at(0)\n\n # # p copy_g\n\n # center = []\n\n # size = grid.size -1\n\n\n # (1..size).each do |i|\n # j = size - i\n # center << [i,j]\n # center << [i,j+1]\n # end\n # p center\n\ncenter = []\ncavity = []\nsize = grid.size-1\n\ncopy_g = grid\n\ngrid.each_with_index do |x, xi|\n x.each_with_index do |y, yi|\n\n\n if xi != 0 && yi != 0 && xi != size && yi != size\n # p \"element [#{xi}, #{yi}] is #{y}\"\n center << [xi, yi]\n\n if grid[xi][yi + 1] < y && grid[xi][yi - 1] < y\n cavity << [xi, yi]\n copy_g[xi][yi] = 'X'\n end\n\n end\n end\nend\n\np center\np cavity\np copy_g\n\nend",
"def two_d_translate(arr)\r\n newArray = []\r\n arr.each do |ele|\r\n ele[1].times do\r\n newArray << ele[0]\r\n end\r\n end\r\n return newArray\r\nend",
"def vertical size, coordinates\n coords = []\n sc = coordinates[0]\n coords << sc\n (size - 1).times do\n coords << sc.next\n sc = sc.next\n end\n coords.map {|x| x + \"#{coordinates[1..-1]}\"}\n end",
"def all_moves_array(initial_x, initial_y)\n\t\tfinal = []\n\t\tx = initial_x + 2\n\t\tfinal << [x, initial_y+1] << [x, initial_y-1]\n\t\tx = initial_x - 2\n\t\tfinal << [x, initial_y+1] << [x, initial_y-1]\n\t\ty = initial_y + 2\n\t\tfinal << [initial_x+1, y] << [initial_x-1, y]\n\t\ty = initial_y - 2\n\t\tfinal << [initial_x+1, y] << [initial_x-1, y]\n\t\tfinal\n\tend",
"def p11\n\tgrid = Matrix[\n\t\t[8,\t2, 22,97,38,15,0, 40,0, 75,4, 5, 7, 78,52,12,50,77,91,8],\n\t\t[49,49,99,40,17,81,18,57,60,87,17,40,98,43,69,48,4, 56,62,0],\n\t\t[81,49,31,73,55,79,14,29,93,71,40,67,53,88,30,3, 49,13,36,65],\n\t\t[52,70,95,23,4, 60,11,42,69,24,68,56,1, 32,56,71,37,2, 36,91],\n\t\t[22,31,16,71,51,67,63,89,41,92,36,54,22,40,40,28,66,33,13,80],\n\t\t[24,47,32,60,99,3, 45,2, 44,75,33,53,78,36,84,20,35,17,12,50],\n\t\t[32,98,81,28,64,23,67,10,26,38,40,67,59,54,70,66,18,38,64,70],\n\t\t[67,26,20,68,2, 62,12,20,95,63,94,39,63,8, 40,91,66,49,94,21],\n\t\t[24,55,58,5, 66,73,99,26,97,17,78,78,96,83,14,88,34,89,63,72],\n\t\t[21,36,23,9, 75,0, 76,44,20,45,35,14,0, 61,33,97,34,31,33,95],\n\t\t[78,17,53,28,22,75,31,67,15,94,3, 80,4, 62,16,14,9, 53,56,92],\n\t\t[16,39,5, 42,96,35,31,47,55,58,88,24,0, 17,54,24,36,29,85,57],\n\t\t[86,56,0, 48,35,71,89,7, 5, 44,44,37,44,60,21,58,51,54,17,58],\n\t\t[19,80,81,68,5, 94,47,69,28,73,92,13,86,52,17,77,4, 89,55,40],\n\t\t[4,\t52,8, 83,97,35,99,16,7, 97,57,32,16,26,26,79,33,27,98,66],\n\t\t[88,36,68,87,57,62,20,72,3, 46,33,67,46,55,12,32,63,93,53,69],\n\t\t[4,\t42,16,73,38,25,39,11,24,94,72,18,8, 46,29,32,40,62,76,36],\n\t\t[20,69,36,41,72,30,23,88,34,62,99,69,82,67,59,85,74,4, 36,16],\n\t\t[20,73,35,29,78,31,90,1, 74,31,49,71,48,86,81,16,23,57,5, 54],\n\t\t[1,\t70,54,71,83,51,54,69,16,92,33,48,61,43,52,1, 89,19,67,48]\n\t]\n\tproducts = []\n\t(0...grid.row_count).each do |row|\n\t\t(0...grid.column_count).each do |col|\n\t\t\tright = col + 3 < grid.row_count\n\t\t\tdown = row + 3 < grid.column_count\n\t\t\tleft = col - 3 >= 0\n\t\t\tif right\n\t\t\t\tset = grid.minor(row..row,col..col+3)\n\t\t\t\tproducts << set.reduce(:*)\n\t\t\tend\n\t\t\tif down and right\n\t\t\t\tdiagonal = []\n\t\t\t\t(0..3).each do |x|\n\t\t\t\t\tdiagonal << grid.minor(row+x..row+x,col+x..col+x).component(0,0)\n\t\t\t\tend\n\t\t\t\tproducts << diagonal.reduce(:*)\n\t\t\tend\n\t\t\tif down\n\t\t\t\tset = grid.minor(row..row+3,col..col)\n\t\t\t\tproducts << set.reduce(:*)\n\t\t\tend\n\t\t\tif down and left\n\t\t\t\tdiagonal = []\n\t\t\t\t(0..3).each do |x|\n\t\t\t\t\tdiagonal << grid.minor(row+x..row+x,col-x..col-x).component(0,0)\n\t\t\t\tend\n\t\t\t\tproducts << diagonal.reduce(:*)\n\t\t\tend\n\t\tend\n\tend\n\tproducts.max\nend",
"def two_d_translate(array)\n new_array = []\n array.each do |sub_array|\n num = sub_array[1]\n ele = sub_array[0]\n num.times { new_array << ele }\n end\n\n new_array\nend",
"def initialize\n @board = Array.new(3) { Array.new(3) }\n end",
"def initialize\n @board = Array.new(3) { Array.new(3) }\n end",
"def minesweeper(matrix)\n height = matrix.count - 1\n width = matrix[0].count - 1\n\n finalArray = Array.new\n \n for i in 0..height\n temp = Array.new\n for j in 0..width\n temp << check33(matrix, j, i)\n end\n finalArray << temp\n end\n finalArray\nend",
"def initialize\n @piece_count = 0\n\n @grid = []\n (0..6).each do |col|\n @grid[col] = []\n (0..5).each do |row|\n @grid[col][row] = \"-\"\n end\n end\n end",
"def three_row_grid\n grid = []\n grid << [\n Cell.new(:alive, 0, 0),\n Cell.new(:alive, 0, 1),\n Cell.new(:dead, 0, 2)\n ]\n grid << [\n Cell.new(:alive, 1, 0),\n Cell.new(:dead, 1, 1),\n Cell.new(:dead, 1, 2)\n ]\n grid << [\n Cell.new(:dead, 2, 0),\n Cell.new(:dead, 2, 1),\n Cell.new(:dead, 2, 2)\n ]\n grid\nend",
"def test_with_irregular_array2D2; show([[0.5], [0.1,0.9]]) end",
"def win_possibilities(piece)\n\t\t[ [ [piece[0],piece[1]],[piece[0]+1,piece[1]],[piece[0]+2,piece[1]],[piece[0]+3,piece[1]] ],\n\t\t[ [piece[0]-1,piece[1]],[piece[0],piece[1]],[piece[0]+1,piece[1]],[piece[0]+2,piece[1]] ],\n\t\t[ [piece[0]-2,piece[1]],[piece[0]-1,piece[1]],[piece[0],piece[1]],[piece[0]+1,piece[1]] ],\n\t\t[ [piece[0]-3,piece[1]],[piece[0]-2,piece[1]],[piece[0]-1,piece[1]],[piece[0],piece[1]] ],\n\t\t[ [piece[0],piece[1]],[piece[0],piece[1]+1],[piece[0],piece[1]+2],[piece[0],piece[1]+3] ],\n\t\t[ [piece[0],piece[1]-1],[piece[0],piece[1]],[piece[0],piece[1]+1],[piece[0],piece[1]+2] ],\n\t\t[ [piece[0],piece[1]-2],[piece[0],piece[1]-1],[piece[0],piece[1]],[piece[0],piece[1]+1] ],\n\t\t[ [piece[0],piece[1]-3],[piece[0],piece[1]-2],[piece[0],piece[1]-1],[piece[0],piece[1]] ],\n\t\t[ [piece[0],piece[1]],[piece[0]+1,piece[1]+1],[piece[0]+2,piece[1]+2],[piece[0]+3,piece[1]+3] ],\n\t\t[ [piece[0]-1,piece[1]-1],[piece[0],piece[1]],[piece[0]+1,piece[1]+1],[piece[0]+2,piece[1]+2] ],\n\t\t[ [piece[0]-2,piece[1]-2],[piece[0]-1,piece[1]-1],[piece[0],piece[1]],[piece[0]+1,piece[1]+1] ],\n\t\t[ [piece[0]-3,piece[1]-3],[piece[0]-2,piece[1]-2],[piece[0]-1,piece[1]-1],[piece[0],piece[1]] ],\n\t\t[ [piece[0],piece[1]],[piece[0]+1,piece[1]-1],[piece[0]+2,piece[1]-2],[piece[0]+3,piece[1]-3] ],\n\t\t[ [piece[0]-1,piece[1]+1],[piece[0],piece[1]],[piece[0]+1,piece[1]-1],[piece[0]+2,piece[1]-2] ],\n\t\t[ [piece[0]-2,piece[1]+2],[piece[0]-1,piece[1]+1],[piece[0],piece[1]],[piece[0]+1,piece[1]-1] ],\n\t\t[ [piece[0]-3,piece[1]+3],[piece[0]-2,piece[1]+2],[piece[0]-1,piece[1]+1],[piece[0],piece[1]] ] ]\n\tend",
"def victoire\n \n [[@a1, @a2, @a3],\n [@a1, @b2, @c3],\n [@a1, @b1, @c1],\n [@b1, @b2, @b3],\n [@c1, @c2, @c3],\n [@c1, @b2, @a3],\n [@a2, @b2, @c2],\n [@a3, @b3, @c3]]\n end",
"def coords\n coord_list = []\n (@x..(@x + @size_x - 1)).each do |i|\n (@y..(@y + @size_y - 1)).each do |j|\n coord = [i, j]\n coord_list << coord\n end\n end\n\n return coord_list\n end",
"def int_to_chess(number_list)\n # if its just one element return an array inside an array\n chess_notation = []\n number_list.each do |square|\n square_notation = []\n letter = num_to_letter(square[0])\n number = square[1]\n square_notation.push(letter, number)\n chess_notation.push(square_notation)\n end\n # todo: flatten if theres only one element inside!\n chess_notation\nend",
"def grid(n, m)\n Array.new(n) { Array.new(n) } # If you attempted to write this as Array.new(n, Array.new(m)) the contents would be repeated for each array rather\nend",
"def default_grid\n array = Array.new(8) { Array.new(8) }\n\n array[0][0] = Rook.new('white', [0,0], 'slide')\n array[1][0] = Knight.new('white', [1,0], 'step')\n array[2][0] = Bishop.new('white', [2,0], 'slide')\n array[3][0] = Queen.new('white', [3,0], 'slide')\n array[4][0] = King.new('white', [4,0], 'step')\n array[5][0] = Bishop.new('white', [5,0], 'slide')\n array[6][0] = Knight.new('white', [6,0], 'step')\n array[7][0] = Rook.new('white', [7,0], 'slide')\n array[0..7].each_with_index { |column, index| \n column[1] = Pawn.new('white', [index,1], 'step') }\n\n array[0][7] = Rook.new('black', [0,7], 'slide')\n array[1][7] = Knight.new('black', [1,7], 'step')\n array[2][7] = Bishop.new('black', [2,7], 'slide')\n array[3][7] = Queen.new('black', [3,7], 'slide')\n array[4][7] = King.new('black', [4,7], 'step')\n array[5][7] = Bishop.new('black', [5,7], 'slide')\n array[6][7] = Knight.new('black', [6,7], 'step')\n array[7][7] = Rook.new('black', [7,7], 'slide')\n array[0..7].each_with_index { |column, index| \n column[6] = Pawn.new('black', [index,6], 'step') }\n\n array\n end",
"def generate_grid x = @x, y = @y\n new_grid = []\n\n y.times { new_grid << [] }\n new_grid.each do |array|\n x.times do\n array << []\n end\n end\n end",
"def two_d_translate(array)\n new_arr = []\n array.each do |arr|\n arr[1].times {new_arr << arr[0]}\n end\n new_arr\nend",
"def test_with_irregular_array2D1; show([[0.1,0.9], [0.5]]) end",
"def create_test_array(size)\n g = []\n count = 1\n (0..size-1).each do |i|\n g[i] = []\n (0..size-1).each do |j|\n g[i][j] = count\n count += 1\n end\n end\n g\nend",
"def two_d_translate(arr)\n result = []\n\tarr.each do |subArray|\n subArray[1].times do \n result.push(subArray[0])\n end\n end\n return result\nend",
"def create_representation(pieces_position_list)\n arr = (0..6).to_a.map { |x| [] }\n positions = ('A'..'G').to_a.each_with_index.reduce({}) { |x, i| x.merge(Hash[*i]) }\n pieces_position_list.reduce(arr) do |x, m|\n (pos, color) = m.split(\"_\")\n arr[positions[pos]] << color\n arr\n end\nend",
"def generate_grid\n grid = []\n @y.times do\n row = []\n @x.times do\n row << nil\n end\n grid << row\n end\n grid\n end",
"def initializeMaze(size)\n return Array.new(size[0] + 1){Array.new(size[1] + 1, '#')}\nend",
"def two_d_translate(arr)\n new_arr = []\n\n arr.each do |subArray|\n ele = subArray[0]\n num = subArray[1]\n\n num.times { new_arr << ele }\n end\n\n return new_arr\nend",
"def fixed_array\n reformated_array.map! do |element|\n element.join(\", \").split(\", \")\n end \nend",
"def matrix(x, y, z)\n sub_array = []\n y.times do\n sub_array.push(z);\n end\n #sub_array\n array = [];\n x.times do\n array.push(sub_array)\n end\n array\nend",
"def printGrid(arr)\n arr.each do |a|\n puts a.inspect\n end\nend",
"def greedy(rows, cols)\n board = Array.new(rows.length) { Array.new(cols.length) }\n rows.each_with_index { |r, r_idx|\n cols.each_with_index { |c, c_idx|\n board[r_idx][c_idx] = if (r > 0 and c > 0)\n rows[r_idx] -= 1\n cols[c_idx] -= 1\n 'x'\n else\n 'o'\n end\n }\n }\n board\nend",
"def test_with_irregular_array3D3; show([[[0,1]]]) end",
"def generate_surrounding_spaces(x, y)\n surround_array = []\n# Loop over 3x3 grid\n for i in x-1..x+1\n for j in y-1..y+1\n # Exclude the center square and ones not on the grid\n if ((i==x) && (j==y)) || !is_on_grid(i,j)\n next\n end\n surround_array.push([j, i])\n end\n end\n\n return surround_array\nend",
"def iterate(arr)\n newArr = []\n (0...arr.size).each do |row|\n newArr[row] = []\n (0...arr.size).each do |col|\n num_neighbors = num_neighbors(row,col,arr)\n if arr[row][col]\n if num_neighbors == 2 || num_neighbors == 3\n newArr[row][col] = true\n else\n newArr[row][col] = false\n end\n else\n if num_neighbors == 3\n newArr[row][col] = true\n else\n newArr[row][col] = false\n end\n end\n end\n end\n arr = newArr\nend",
"def initialize\n @grid = []\n (0..5).each do |row|\n @grid[row] = []\n (0..6).each do |col|\n @grid[row][col] = \"_ \"\n end\n end\n end",
"def print_2d_array(a)\r\n\r\n max_word_length = 0 # keeps track of the longest string in layout\r\n flattened = a.flatten\r\n flattened.each{|x| if x.length > max_word_length\r\n max_word_length = x.length \r\n end}\r\n max_word_length += 1 # we want a space in front when printing\r\n \r\n # insert a row of numbers into layout for printing\r\n max_no_columns = 0\r\n a.each{|x| if x.length > max_no_columns\r\n max_no_columns = x.length\r\n end}\r\n counter_array = [*0...max_no_columns] # [0, 1, 2, 3....max_no_columns]\r\n \r\n # clone, insert numbers into array, then print\r\n temp = Marshal.load(Marshal.dump(a))\r\n temp.unshift(counter_array) # insert in front\r\n print_array(temp, max_word_length) \r\nend",
"def array\n \t(1..size).map{ |i| \"#{fill}\" }\n end",
"def tile_board\n @board_array.each_with_index do |row_array, row|\n 10.times{|column| row_array << Tile.new(row,column)}\n end\n end",
"def rect(a, b, screen)\n screen[0...b].each do |row|\n row[0...a] = (\"#\"*a).split(\"\")\n end\n\n screen\nend",
"def get_board(width, height)\n #array with all possible colors\n colors = [:red, :blue, :green, :yellow, :cyan, :magenta]\n\n board = Array.new(height)\n (0..height-1).each do |i|\n board[i] = Array.new(width)\n (0..width-1).each do |j|\n board[i][j] = colors.sample\n end\n end\n\n return board\nend",
"def make_grid()\n grid = Array.new(GRID_SIZE) { Array.new(GRID_SIZE) }\n ## Build init grid\n x = 1\n y = 1\n File.readlines(FILENAME).each do |line|\n x = 1\n line.chars.each do |char|\n grid[y][x] = char\n x += 1\n end\n y += 1\n end\n grid\nend",
"def board(array)\n puts \" #{array[0][0]} | #{array[0][1]} | #{array[0][2]}\"\n puts \"-----------\"\n puts \" #{array[1][0]} | #{array[1][1]} | #{array[1][2]}\"\n puts \"-----------\"\n puts \" #{array[2][0]} | #{array[2][1]} | #{array[2][2]}\\n\\n\"\nend",
"def walk(grid, x, y)\n [N, S, E, W].shuffle.each do |dir|\n nx, ny = x + DX[dir], y + DY[dir]\n if nx >= 0 && ny >= 0 && ny < grid.length && nx < grid[ny].length && grid[ny][nx] == 0\n grid[y][x] |= dir\n grid[ny][nx] |= OPPOSITE[dir]\n \n return [nx, ny]\n end\n end\n \n nil\nend",
"def squares\r\n [@a1, @a2, @a3, @b1, @b2, @b3, @c1, @c2, @c3]\r\n \r\nend",
"def new_groups(array)\n new_group = []\n array.shuffle.each_slice(4){|acc| new_group << acc}\n if new_group.length > 1 && new_group.last.length <= 2\n (new_group.last.length).times do |i|\n new_group[i].push(new_group.last.pop)\n new_group.pop\n end\n end\n new_group\nend",
"def discover_points # step 1\r\n x = 0\r\n y = 0\r\n @image_arr.each do |row|\r\n x = 0\r\n row.each do |cell|\r\n if cell == 1 # discovered the cell is 1\r\n @ordinal_arr.push([y,x]) # this is where i push the ordinals.\r\n puts \"#{y},#{x}\"\r\n end\r\n x = x + 1\r\n end\r\n y = y + 1\r\n puts \"\" \r\n end\r\n end",
"def board_creator()\n board = []\n 10.times do |x|\n board << Array.new\n 10.times do |y|\n board[x] << \"sea #{x} #{y}\"\n end\n end\n return board\nend",
"def make_board\n board = Array.new(8) { Array.new(8) { Array.new(2) } }\n board.each_with_index do |row, row_i|\n row.each_with_index { |column, column_i| column[0], column[1] = row_i, column_i }\n end\n end",
"def _get_board_squares(square_starting_points)\n array_of_squares = []\n square_starting_points.each do |starting_point|\n row_index = starting_point[0]\n column_index = starting_point[1]\n 3.times do\n 3.times do\n array_of_squares << sudoku_board[row_index][column_index]\n column_index += 1\n end\n row_index += 1\n column_index = starting_point[1]\n end \n end\n array_of_squares.each_slice(9).to_a\n end",
"def to_list\n retval = []\n 1.upto(@tiles.length - 1) do |row|\n 1.upto(@tiles[row].length - 1) do |col|\n retval << @tiles[row][col]\n end\n end\n retval\n end",
"def trans(input)\n\trows, columns = input.count, input.first.count\n\tx, y = [], []\n\ti = j = 0\n\twhile j < columns do\n\t\twhile i <= rows - 1 do\n\t\t\tx.push(input[i][j])\n\t\t\ti += 1\n\t\tend\n\t\tj += 1 #go to next column\n\t\ti = 0 #reset the inner counter to go back to 1st array\n\tend\n\tx.each_slice(rows).each.map{|e| y.push(e)}\n\treturn y\nend",
"def initialize\n @coordinates = Array.new(2)\n @piece = nil\n end",
"def magic_array (liste)\n\treturn liste.flatten.collect {|x| x * 2} .delete_if { |x| x%3 == 0 } .uniq .sort\nend",
"def lcts(array)\nend",
"def two_d_translate(arr)\n one_d_arr = []\n arr.each do | outer |\n outer.each.with_index do | inner, i |\n if i == 0\n outer[1].times do\n one_d_arr << inner\n end\n end\n end\n end\n return one_d_arr\nend",
"def print\n grid_array = []\n @height.times do |r|\n grid_array << @grid[r * @width, @width].split('') * ' '\n end\n grid_array\n end",
"def print_neighbours\n @x.times{ |r|\n @y.times{|c| \n #+1,+1 0,+1 +1,0\n print @mat[r+1].nil? || @mat[r+1][c+1].nil? ? \"x \" : \"- \"\n print @mat[r].nil? || @mat[r][c+1].nil? ? \"x \" : \"- \"\n print @mat[r+1].nil? || @mat[r+1][c].nil? ? \"x \" : \"- \"\n \n #-1,-1 0,-1 -1,0\n print @mat[r-1].nil? || @mat[r-1][c-1].nil? ? \"x \" : \"- \"\n print @mat[r-1].nil? || @mat[r-1][c].nil? ? \"x \" : \"- \"\n print @mat[r].nil? || @mat[r][c-1].nil? ? \"x \" : \"- \"\n \n #+1,-1 -1,+1\n print @mat[r-1].nil? || @mat[r-1][c+1].nil? ? \"x \" : \"- \"\n print @mat[r+1].nil? || @mat[r+1][c-1].nil? ? \"x \" : \"- \"\n print \"\\n\"\n \n }\n \n } \n end",
"def create_arr(prng, size, percent)\n arr = []\n (0...size).each do |x|\n arr[x] = []\n (0...size).each do |y|\n if prng.rand(101) <= percent\n arr[x][y] = \"X\"\n else\n arr[x][y] = \".\"\n end\n end\n end\n arr\nend",
"def create_position_array()\n array_2D = []\n array_1D = []\n\n (0...@row).each_with_index do |value, row_index|\n (0...@col).each_with_index { |value, col_index| array_1D.append(value+(row_index*@col)) }\n array_2D.append(array_1D)\n array_1D = []\n end\n\n return array_2D\n end",
"def create_grid\n grid = Array.new(8) { Array.new(8) { [] } }\n grid.map! do |row|\n if grid.index(row).even?\n row.each { |space| row.index(space).even? ? space << 'white' : space << 'black' }\n else\n row.each { |space| row.index(space).even? ? space << 'black' : space << 'white' }\n end\n row.map! do |space|\n space << [grid.index(row), row.index(space)]\n end\n end\n grid.reverse\n end",
"def build_bird(arr, dir, x, y)\n if x == 0 and y == 0\n\n row = Hash.new\n row[\"x\"] = 24\n row[\"y\"] = 6\n row[\"str\"] = \"/ # \\\\\"\n arr.push(row)\n\n row = Hash.new\n row[\"x\"] = 26\n row[\"y\"] = 5\n row[\"str\"] = \"__\"\n arr.push(row)\n\n row = Hash.new\n row[\"x\"] = 23\n row[\"y\"] = 7\n row[\"str\"] = \"|█| ==\"\n arr.push(row)\n\n row = Hash.new\n row[\"x\"] = 24\n row[\"y\"] = 8\n row[\"str\"] = \"\\\\ /\"\n arr.push(row)\n\n row = Hash.new\n row[\"x\"] = 26\n row[\"y\"] = 8\n row[\"str\"] = \"__\"\n arr.push(row)\n else\n if dir.to_i == 1\n\n arr.each do |row|\n row[\"x\"] = row[\"x\"].to_i - x.to_i\n row[\"y\"] = row[\"y\"].to_i - y.to_i\n end\n\n else\n\n arr.each do |row|\n row[\"x\"] = row[\"x\"].to_i + x.to_i\n row[\"y\"] = row[\"y\"].to_i + y.to_i\n end\n\n end\n end\nend",
"def custom_primer_exp_2\n [[0,0],[0,1],[0,2],[0,3],[0,4],[0,5],[0,6],[0,7],[0,8],[0,9],\n [4,0],[4,1],[4,2],[4,3],[4,4],[4,5],[4,6],[4,7],[4,8],[4,9],\n [1,0],[1,1],[1,2],[1,3],[1,4],[1,5],[1,6],[1,7],[1,8],[1,9],\n [5,0],[5,1],[5,2],[5,3],[5,4],[5,5],[5,6],[5,7],[5,8],[5,9],\n [2,0],[2,1],[2,2],[2,3],[2,4],[2,5],[2,6],[2,7],[2,8],[2,9],\n [6,0],[6,1],[6,2],[6,3],[6,4],[6,5],[6,6],[6,7],[6,8],[6,9]] \n end",
"def eight_queens_possibilities(current_row, taken_columns, positions)\n\nend",
"def create_board(size)\n return (0...size).map{|b| (0...size).map{|bb| 0}}\n end",
"def find_all_sequences(x,y)\n ret = []\n ret << [[x,y],[x+1,y],[x+2,y],[x+3,y]] if x+3 <= 19\n ret << [[x,y],[x,y+1],[x,y+2],[x,y+3]] if y+3 <= 19\n ret << [[x,y],[x+1,y+1],[x+2,y+2],[x+3,y+3]] if y+3 <= 19 && x+3 <= 19\n ret << [[x,y],[x-1,y+1],[x-2,y+2],[x-3,y+3]] if x-3 >= 0 && y+3 <= 19\n ret\nend",
"def squares\n [@a1, @a2, @a3, @b1, @b2, @b3, @c1, @c2, @c3]\n\nend",
"def coord(a, b)\n final_array = []\n first_array = (1..a).to_a\n second_array = (1..b).to_a\n first_array.each do |first_num|\n second_array.each do |second_num|\n coordinates = \"(#{first_num}, #{second_num})\"\n final_array.push(coordinates)\n end\n end\n final_array \nend",
"def draw_new\n\t @board=Array.new(@y) {Array.new(@x){'*'}}\n\t \n end",
"def fill_board(array)\n # puts array.inspect\n \n empty_board = BoardLoader.empty_board\n b = []\n \n count = 0\n empty_board.each_index do |row|\n b[row] = []\n empty_board[row].each_index do |column|\n if empty_board[row][column] == BLANK_SPACE\n if count < array.length\n b[row][column] = array[count]\n count += 1\n if count == array.length\n $last_spot = [row, column]\n end\n else\n b[row][column] = BLANK_SPACE\n end\n else\n b[row][column] = empty_board[row][column]\n end\n end\n end\n \n if $last_spot.nil?\n $last_spot = [empty_board.length-1, empty_board[0].length-1]\n end\n \n \n b\n end",
"def create_board(array)\n num = array.length\n board = Array.new(num) { Array.new(num, false) }\n array.each_with_index { |board_position, row| board[row][board_position - 1] = true }\n board\nend",
"def flat_board\n board.flatten\n end",
"def print_arr(arr)\n # TODO 1\n (0...arr.length).each do |x|\n (0...arr.length).each do |y|\n print arr[x][y]\n print \" \"\n end\n print \"\\n\"\n end\nend",
"def groupCreator(array)\n\t3.times do\n\t\tarray.shuffle!\n\t\t\tnewArray=array.each_slice(4).to_a\n\t\t\tnewArray.each do |x|\n\t\t\t\tif(x.length<4)\n\t\t\t\t\ti=0\n\t\t\t\t\tuntil x.length<=0\t\n\t\t\t\t\t\tnewArray[i]<<x.pop\n\t\t\t\t\t\ti+=1\n\t\t\t\t\tend\n\t\t\t\t\tnewArray.pop\n\t\t\t\tend\n\t\t\tend\n\t\t\tp newArray\n\tend\nend",
"def tiles\n @shape.values.map { |v| v.values }.flatten.select {|x| x != :empty}\n end"
] | [
"0.66918766",
"0.6473316",
"0.64554584",
"0.62220085",
"0.6184812",
"0.6164545",
"0.61179817",
"0.6057151",
"0.6052865",
"0.6045232",
"0.6038872",
"0.6011229",
"0.59992135",
"0.5983799",
"0.59737647",
"0.59647954",
"0.5947121",
"0.589279",
"0.5868995",
"0.58670413",
"0.5859153",
"0.5858886",
"0.585631",
"0.5832752",
"0.5831472",
"0.5829803",
"0.5829773",
"0.57803124",
"0.5777354",
"0.57687354",
"0.5763705",
"0.5748799",
"0.57465464",
"0.5743184",
"0.5743184",
"0.57339746",
"0.5732691",
"0.570611",
"0.57029647",
"0.5696093",
"0.56877226",
"0.56860906",
"0.5671476",
"0.5661889",
"0.56606716",
"0.5659639",
"0.5659258",
"0.56461626",
"0.5640846",
"0.56384397",
"0.56324553",
"0.5612493",
"0.56109107",
"0.5609292",
"0.5607258",
"0.5606444",
"0.56034493",
"0.56026745",
"0.559606",
"0.55959773",
"0.5575272",
"0.5569385",
"0.55651706",
"0.55635256",
"0.5550678",
"0.5547849",
"0.554277",
"0.5535072",
"0.5529438",
"0.55225325",
"0.55146784",
"0.55011976",
"0.54958785",
"0.5487926",
"0.54841673",
"0.5482061",
"0.54792607",
"0.5478156",
"0.54754484",
"0.547121",
"0.5469502",
"0.5469225",
"0.54550606",
"0.54500157",
"0.5442122",
"0.543734",
"0.54316247",
"0.5429305",
"0.5421594",
"0.5416872",
"0.5416758",
"0.54118586",
"0.54037964",
"0.53975576",
"0.5395361",
"0.5392677",
"0.5391675",
"0.5390739",
"0.53900194",
"0.5387462",
"0.53828454"
] | 0.0 | -1 |
Remove notifications from problem editors | def remove_problem_upvote_notifications
((@resource.versions.map{ |version| version.user }).uniq - [@actor]).each do |editor|
n = Notification.find_by(recipient: editor,
actor: @actor,
notifiable: @resource,
action_type: "like")
if !n.nil?
n.destroy
@removed += 1
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_from_notifications\n notifications.find_all { |n| n.noti_read == 'N' }.each do |n|\n remove_from_notification(n)\n end # each n\n end",
"def remove_proof_upvote_notifications\n ((@resource.versions.map{ |version| version.user }).uniq - [@actor]).each do |editor|\n n = Notification.find_by(recipient: @resource.user, actor: @actor, \n action_type: \"like\", \n notifiable: @resource) if @resource.user != @actor\n if !n.nil?\n n.destroy\n @removed += 1\n end\n end\n end",
"def destroy_notifications\n Notification.where(notifiable_id: @lecture.id, notifiable_type: 'Lecture')\n .delete_all\n end",
"def clear_notifications\n Notification.where(actor_id: id).destroy_all\n end",
"def delete_notifications\n Notification.where(origin_type: 'Message', origin_id: @message.id).destroy_all\n end",
"def fRemoveNotificationsFrom (email)\n @users.removeNotificationsFrom(email)\n end",
"def run_on_removal(paths)\n @builder.notify\n end",
"def remove_notifier(notifier)\n @notifiers.delete(notifier)\n end",
"def strip_removed_issues!\n removed_issues.each { |issue| issue.update!(review_request: nil) }\n end",
"def delete_notifier(name)\n @notifiers.delete(name) if defined? @notifiers\n end",
"def exec_commande_notifications\n # On affiche toujours la liste des notifications\n clear\n notifications.wait_for_choix_and_execute\n end",
"def delete_notifications\n @tutor = Tutor.find(params[:id])\n Notification.where(user_type: 'Tutor', user_id: @tutor.id).destroy_all\n end",
"def remove_comment_upvote_notifications\n n = Notification.find_by(recipient: @resource.user, actor: @actor, \n action_type: \"like\",\n notifiable: @resource) if @resource.user != @actor\n if !n.nil?\n n.destroy\n @removed += 1\n end\n end",
"def remove_invalid_emails\n notification_emails.each do |email|\n if !email.valid?\n notification_emails.delete email\n end\n end\n end",
"def remove_remaining_notification_settings\n source_project.notification_settings.destroy_all # rubocop: disable DestroyAll\n end",
"def send_deletion_notification\n @notifiable = self\n @tutor = User.find(self.user_id)\n @student = User.find(self.pupil_id)\n Notification.create(:user => @tutor, :receiver_id => @student.id, :message => @tutor.title + ' has removed you from their student list')\n end",
"def index\n @notifications = Notification.where user_id: current_user.id, seen: nil\n @notifications = @notifications.sort { | x , y | x.seen? ? 1:0 <=> y.seen? ? 0:1}\n\n @notifications.each do |msg|\n msg.destroy\n end\n\n end",
"def remove_from_notification (n)\n logger.debug2 \"comment id #{id}. Notification id #{n.id}. notification key #{n.noti_key}\" if debug_notifications\n # only modify unread notifications\n return unless n.noti_read == 'N'\n cn = notifications.where(\"notification_id = ?\", n.id).first\n logger.debug2 \"cn.class = #{cn.class}\" if debug_notifications\n logger.debug2 \"cn.id = #{cn.id}\" if cn and debug_notifications\n logger.debug2 \"cn.noti_key = #{cn.noti_key}\" if cn and debug_notifications\n logger.debug2 \"cn.from_user.short_user_name = #{cn.from_user.short_user_name}\" if cn and cn.from_user and debug_notifications\n logger.debug2 \"cn.to_user.short_user_name = #{cn.to_user.short_user_name}\" if cn and cn.to_user and debug_notifications\n # find no users before and after removing this comment from notification\n old_no_users = n.api_comments.collect { |c| c.user_id }.uniq.size\n new_users = n.api_comments.find_all { |ac| ac.id != id }.collect { |ac| ac.user }.uniq\n new_no_users = new_users.size\n if new_no_users == 0\n # last user for this unread notification has been removed\n logger.debug2 \"last user for this unread notification has been removed\" if debug_notifications\n n.destroy!\n return\n end\n return if old_no_users == new_no_users # unchanged number of users => unchanged notification\n if new_no_users > 3\n # unchanged noti_key and username array. Just change number of users\n logger.debug2 \"unchanged noti_key and username array. Just change number of users\" if debug_notifications\n notifications.delete(cn) if cn\n noti_options = n.noti_options\n noti_options[:no_users] = new_no_users\n noti_options[:no_other_users] = new_no_users - 2\n n.noti_options = noti_options\n n.save!\n return\n end\n # change noti_key, username array and number of users\n if n.noti_key !~ /^([a-z_]+)_(\\d)_v(\\d+)$/\n logger.debug2 \"invalid noti key format. noti key = #{noti_key}\"\n return\n end\n logger.debug2 \"change noti_key, username array and number of users\" if debug_notifications\n noti_key_prefix, noti_key_no_users, noti_key_version = $1, $2, $3\n noti_options = n.noti_options\n (1..3).each { |i| noti_options[\"username#{i}\".to_sym] = nil }\n usernames = new_users.collect { |u| u.short_user_name }\n 0.upto(usernames.size-1).each do |i|\n noti_options[\"username#{i+1}\".to_sym] = usernames[i]\n end\n noti_options[:no_users] = new_no_users\n noti_options[:no_other_users] = new_no_users - 2\n n.noti_key = \"#{noti_key_prefix}_#{new_no_users}_v#{noti_key_version}\"\n logger.debug2 \"noti_key: old = #{n.noti_key_was}, new = #{n.noti_key}\" if debug_notifications\n n.noti_options = noti_options\n notifications.delete(cn) if cn\n n.save!\n end",
"def run_on_removal(paths)\n super\n end",
"def destroy\n @notification.destroy\n end",
"def clear_registered_linters\n registered_linters.clear\n end",
"def down\n change_column_null :notifications, :notification_content, false\n end",
"def destroy\n \n \n #remove notification \n #remove plots \n\n\n if current_user.id==@tale.user_id\n notifications = Journal.where(notification_type: \"StoryCreate\", notification_id: @tale.id)\n notifications.each do|notification|\n notification.delete\n end \n\n\n @tale.destroy\n\n\n\n\n\n\n respond_to do |format|\n #format.html { redirect_to tales_url }\n format.html { redirect_to profiles_index_path(current_user.id) }\n format.json { head :no_content }\n end\n end\n\n\n end",
"def reset_notifier!\n notifier.reset!\n end",
"def resource_context_destroy\n # only notify for customizations being deleted, not other kinds\n if !self.institution_id.nil? && self.requirement_id.nil? && self.resource_id.nil?\n institution = self.institution\n template = self.requirements_template\n users = institution.users_in_and_above_inst_in_role(Role::RESOURCE_EDITOR)\n users += institution.users_in_and_above_inst_in_role(Role::INSTITUTIONAL_ADMIN)\n users.uniq! #only the unique users, so each user is only listed once\n users.delete_if {|u| !u[:prefs][:resource_editors][:deleted] }\n users.each do |user|\n UsersMailer.notification(\n user.email,\n \"DMP Template Customization Deleted: #{template.name}\",\n \"resource_editors_deleted\",\n {:user => user, :customization => self} ).deliver\n end\n end\n end",
"def notifications\n end",
"def skip_email_changed_notification!; end",
"def clear_notifier!\n self.notifier = nil\n self.root_notifier = nil\n end",
"def free_resources\n\t\tunset_vim_event_hooks\n\tend",
"def deleteEventNotifications(u, e)\n @ns = getEventNotifications(u, e)\n @ns.each do |n|\n n.destroy\n end\n if u != nil\n redirect_to \"/notifications\", notice: \"All notifications for \" + Event.find_by(id: e).name + \" deleted.\"\n end\n end",
"def run_on_removals(paths)\n paths.each do |path|\n warn \"file #{path} removed -- it's up to you to remove it from the server if desired\"\n end\n end",
"def after_destroy(post)\n post = post.to_post\n Notification.where(:scope => 'mention',\n :source_ids => {'Post' => post.id}).each do |notification|\n notification.remove_source(post)\n notification.update_actors\n end\n end",
"def cleanup\n\t\t\tself.framework.events.remove_session_subscriber(self)\n\t\t\tremove_console_dispatcher('notify')\n\t\tend",
"def cleanup\n\t\t\tself.framework.events.remove_session_subscriber(self)\n\t\t\tremove_console_dispatcher('notify')\n\t\tend",
"def clean!\n @changes = []\n end",
"def clean_for_period(future_period, notification_times)\n future_period.notifications.all.each do |existing_notification|\n unless notification_times.include?(existing_notification.time.utc)\n existing_notification.delete\n end\n end\n end",
"def unwatch_all\n # Remove observers for all entity types.\n @observers.each{ |observed, observer|\n observed.remove_observer(observer)\n }\n @observers.clear\n end",
"def remove_idea_events(events)\n events.delete_if(&:idea?)\nend",
"def RemoveObsoleteResolvables\n Builtins.y2milestone(\"--------- removing obsolete selections ---------\")\n\n # this removes only information about selections and applied patches\n # it doesn't remove any package\n Builtins.y2milestone(\n \"Removing all information about selections and patches in %1\",\n Installation.destdir\n )\n Pkg.TargetStoreRemove(Installation.destdir, :selection)\n\n # disabled by FATE #301990, bugzilla #238488\n # Pkg::TargetStoreRemove (Installation::destdir, `patch);\n\n Builtins.y2milestone(\"--------- removing obsolete selections ---------\")\n\n nil\n end",
"def removeWarnings!\n @pages.reject! { |page, entries| entries.any? { |entry| entry[:type] == Check::WARNING } }\n end",
"def destroy_with_notifications\n errors.clear\n if self.new_record?\n errors.add(:base, :problem_destroying)\n return false\n end\n resp = false\n ActiveRecord::Base.transaction do\n DocumentsSlide.joins(:slide, {:slide => :lesson}).select('lessons.user_id AS my_user_id, lessons.title AS lesson_title, lessons.id AS lesson_id').group('lessons.id').where('documents_slides.document_id = ?', self.id).each do |ds|\n n_title = I18n.t('notifications.documents.destroyed.title')\n n_message = I18n.t('notifications.documents.destroyed.message', :document_title => self.title, :lesson_title => ds.lesson_title)\n n_basement = I18n.t('notifications.documents.destroyed.basement', :lesson_title => ds.lesson_title, :link => lesson_viewer_path(ds.lesson_id.to_i))\n if ds.my_user_id.to_i != self.user_id && !Notification.send_to(ds.my_user_id.to_i, n_title, n_message, n_basement)\n errors.add(:base, :problem_destroying)\n raise ActiveRecord::Rollback\n end\n Bookmark.where(:bookmarkable_type => 'Lesson', :bookmarkable_id => ds.lesson_id.to_i).each do |b|\n automatic_message = I18n.t('notifications.documents.standard_message_for_linked_lessons', :document_title => self.title)\n n_title = I18n.t('notifications.lessons.modified.title')\n n_message = I18n.t('notifications.lessons.modified.message', :lesson_title => ds.lesson_title, :message => automatic_message)\n n_basement = I18n.t('notifications.lessons.modified.basement', :lesson_title => ds.lesson_title, :link => lesson_viewer_path(ds.lesson_id.to_i))\n if !Notification.send_to(b.user_id, n_title, n_message, n_basement)\n errors.add(:base, :problem_destroying)\n raise ActiveRecord::Rollback\n end\n end\n end\n begin\n self.destroy\n rescue StandardError\n errors.add(:base, :problem_destroying)\n raise ActiveRecord::Rollback\n end\n resp = true\n end\n resp\n end",
"def notification_reject\n @notifications = current_user.notifications.where(notification_type: [\"reject\", \"return\"]).order(\"created_at desc\")\n render :layout => false\n end",
"def clear_patches\n @patches.clear\n end",
"def snmp_notification_receiver_remove(opts = {})\n cmd = 'no ' << snmp_notification_receiver_cmd(opts)\n configure cmd\n end",
"def run_on_removals(paths)\n paths.each do |path|\n system \"theme remove #{path}\"\n end\n end",
"def remove(*a)\n # event is namespaced to not accidently trigger closing the dialog box\n select(*a).trigger('clockwork.remove')\n select(*a).remove()\n end",
"def notify_unflagged(_cell); end",
"def admin_remove_service_notification(service)\n @service = service\n subject = \"Service #{service.title} has been removed!!\"\n setup_email(ADMIN_EMAIL, subject, service.user.email)\n end",
"def clean_notes\n for note in notes\n if note.comment.blank?\n note.destroy\n end\n end\n\n end",
"def run_on_changes(modified, added, removed)\n types = {\n MODIFICATION_TASKS => modified,\n ADDITION_TASKS => added,\n REMOVAL_TASKS => removed\n }\n\n ::Guard::UI.clearable\n\n _scoped_plugins do |plugin|\n ::Guard::UI.clear\n\n types.each do |tasks, unmatched_paths|\n next if unmatched_paths.empty?\n match_result = ::Guard::Watcher.match_files(plugin, unmatched_paths)\n next if match_result.empty?\n\n next unless (task = tasks.detect { |meth| plugin.respond_to?(meth) })\n _supervise(plugin, task, match_result)\n end\n end\n end",
"def remove_message_reaction(data); end",
"def ensure_deletion_fixes \n # TO DO\n end",
"def destroy\n #@event_event.destroy\n @event_event.deleted = true\n dest = @event_event.id\n type = 7 #event_notifications_code\n Notification.clear_notifications(type,dest)\n @event_event.save\n @event_event.user.remove_event\n respond_to do |format|\n format.html { redirect_to admin_event_events_url, notice: 'Event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def set_notifications\n @notifications = Notification.all\n end",
"def unsubscribe\n @issue.skip_email_notifications = true\n @issue.unsubscribe(current_user)\n render 'issues/update.js.erb', locals: {project: @project, key: @key, translation: @translation, issue: @issue }\n end",
"def deleteUserNotifications(u)\n @ns = getUserNotifications(u)\n @ns.each do |n|\n n.destroy\n end\n redirect_to \"/notifications\", notice: \"All notifications deleted.\"\n end",
"def deregister_snippets\n # Loop through all registered snippets\n @@registered.keys.each do |id|\n # Deregister the snippet view\n Hooks::View.deregister_dynamic_view(id)\n # Remove the snippet from the registered array\n @@registered.delete(id)\n end\n end",
"def changes_notification\n NotifierMailer.changes_notification(User.limit(2), {\n title: 'Email test title',\n content: 'Email test content',\n body: 'Email test body'\n })\n end",
"def remove_watcher\n email = args.shift\n\n if delete_watcher_from_email(email)\n puts \"Removed #{email} as a watcher\"\n else\n puts \"No watchers with that email found in the watcher list\"\n end\n end",
"def unload_plugins\n en_ete_dw_up\n tool_menu.remove(@menu_item_extract_text)\n end",
"def dont_notify_modification\n if @ok\n @lesson.dont_notify_changes\n render :nothing => true\n end\n end",
"def remove_marked\n @objects.remove_marked\n end",
"def unregister(*extensions); end",
"def run_on_removals(paths)\n Runner.remove(Inspector.clean(paths, :missing_ok => true), watchers, options)\n end",
"def remove_all_message_reactions(data); end",
"def destroy\n @detour = Detour.find(params[:id])\n @detour.notifications.each do |n|\n logger.info(\"Deleteing notification \" + n.id.to_s + \" for detour \" + @detour.id.to_s + \"\\n\")\n n.destroy\n end\n @detour.destroy\n\n respond_to do |format|\n format.html { redirect_to(detours_url) }\n format.xml { head :ok }\n end\n end",
"def remove_all_confirmation_events\n ConfirmationEvent.find_each(&:destroy)\n end",
"def unmanage window\n if h=managed[window]\n h[:source_funcs].each do |sf|\n sf.remove()\n end\n \n managed.delete window\n end\n end",
"def destroy\n @pic.destroy\n\n notifications = Notification.find(:all, :conditions => ['notification_type = ? and object_id = ?', \"comment\", @pic.id])\n notifications.each do |n|\n n.destroy\n end\n respond_to do |format|\n format.html { redirect_to(root_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @admin_notification.destroy\n\n head :no_content\n end",
"def remove_mentionables(mentioner)\n self.where(:mentioner_type => mentioner.class.name.classify).\n where(:mentioner_id => mentioner.id).destroy_all\n end",
"def release_notification\n @has_data = Settings.has_user_data\n @data = Settings.getSavedData\n \n #reset date. otherwise it's being shown all the time, once it's been set \n $choosed['1'] = nil\n end",
"def remove_notification\n if followable_type == \"User\"\n notice = Notification.where({\n target_id: followable_id,\n target_type: \"User\",\n notice_id: id,\n notice_type: \"Follow\",\n user_id: followable_id,\n notifier_id: follower_id\n }).first\n notice.blank? ? true : notice.destroy\n end\n end",
"def stop(examples_notification)\n super(examples_notification)\n end",
"def remove_usage_editor\n usage_id = params[:id]\n render :update do |page|\n page.replace_html(\"div_usage_extra_#{usage_id}\", \"\")\n end\n end",
"def remove_message(name)\n\t\tend",
"def remove_observers\n\t\t\t@observers.collect {|observer| self.remove_observer(observer) }\n\t\tend",
"def clean_pending_item_notifications!(owner_user_ids)\n owner_user_ids.each do|owner_user_id|\n pending_item_count = ::Item.pending.where(user_id: owner_user_id).count\n logger.info \" | user #{owner_user_id} has #{pending_item_count} pending items\"\n if pending_item_count < 1\n ::Users::Notification.where(sender_user_id: owner_user_id, related_model_type: 'Child',\n related_model_id: owner_user_id, title: ::Item::PENDING_ITEM_TITLE).\n destroy_all\n end\n end\n end",
"def remove_notification_message parrent, user_id\n #ap 'NotificationModule#remove_notification_message'\n #ap user_id\n #ap parrent.id\n #ap parrent.class.name\n if notification_message = NotificationMessage.find_by(\n user_id: user_id,\n asset_id: parrent.id,\n asset_type: parrent.class.name\n )\n notification_message.destroy\n end\n end",
"def remove_pending\n authorize! :update, @user, :message => t('errors.messages.not_authorized_as_manager')\n\n @user = User.find(params[:id])\n @marina = Marina.find(params[:marina])\n @marina.pending_users.delete(@user)\n\n @user.marina_state= \"\"\n UserNotifier.remove_pending(@user).deliver\n @user.save\n @marina.save\n redirect_to marina_path(@marina), :notice => t('errors.messages.remove_pending')\n #\"Bertholder and marina are now connected. a notification email has been sent\"\n\n\n\n end",
"def remove_silencers!\n @silencers = []\n end",
"def remove_silencers!\n @silencers = []\n end",
"def remove_silencers!\n @silencers = []\n end",
"def declines!(submission)\n collaboration = collaborations.where(submission: submission).first\n # Remove the associated notification.\n notifications.where(notifyable_type: 'Collaboration', notifyable_id: collaboration.id).first.destroy\n collaboration.destroy\n end",
"def destroy\n @notification_content.destroy\n respond_to do |format|\n format.html { redirect_to notification_contents_url, notice: 'Notification content was successfully destroyed.' }\n end\n end",
"def clear_changes!\n @changes.clear\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n authorize(Notification)\n if @notification.destroy\n msg = success_message(@notification, _('deleted'))\n redirect_to super_admin_notifications_path, notice: msg\n else\n flash.now[:alert] = failure_message(@notification, _('delete'))\n render :edit\n end\n end",
"def destroy\n if @notification.from_type == \"product\"\n @notification.destroy\n respond_to do |format|\n format.json { head :no_content }\n format.html { redirect_to denounced_products_path, notice: 'Notificacion eliminada correctamente' }\n end\n elsif @notification.from_type == \"comment\"\n @notification.destroy\n respond_to do |format|\n format.json { head :no_content }\n format.html { redirect_to denounced_comments_path, notice: 'Notificacion eliminada correctamente' }\n end\n \n elsif @notification.from_type == \"contact\"\n @notification.destroy\n respond_to do |format|\n format.json { head :no_content }\n format.html { redirect_to messages_path, notice: 'Notificacion eliminada correctamente' }\n end\n elsif @notification.from_type == \"suggest\"\n @notification.destroy\n respond_to do |format|\n format.json { head :no_content }\n format.html {redirect_to suggested_products_path, notice: 'Notificacion eliminada correctamente'}\n end\n else\n @notification.destroy\n respond_to do |format|\n format.json { head :no_content }\n format.html { redirect_to notifications_path, notice: 'Notificacion eliminada correctamente' }\n end\n end\n end",
"def clear_new_badges\n achievements.each(&:clear_new_badges)\n end",
"def remove_bugs_from_errata(advisory, bugids)\n user = find_user\n unless user.in_role?('devel', 'pm', 'secalert')\n raise \"Do not have permission to remove bugs!\"\n end\n\n errata = find_errata(advisory)\n to_delete = FiledBug.find(:all, :conditions => ['errata_id = ? and bug_id in (?)',\n errata,\n bugids])\n dbs = DroppedBugSet.new(:bugs => to_delete, :errata => errata)\n if dbs.save\n msg = \"Removed the following bugs:\\n\" + to_delete.collect { |b| \"bug #{b.id}\" }.join(\"\\n\")\n else\n msg = \"Error dropping bugs: #{dbs.errors.full_messages.join(',')}\"\n end\n return msg\n end",
"def destroy\n #@admin_academy_question.destroy\n a = Academy::Question.find(params[:id].split('-')[0])\n a.update(:is_deleted => true)\n dest = a.id\n type = 4 #answer_question_code\n Notification.clear_notifications(type,dest)\n a.save\n\n respond_to do |format|\n format.html { redirect_to admin_academy_questions_url }\n format.json { head :no_content }\n end\n end",
"def run_on_removals(paths)\n end",
"def destroy\n @notifier.destroy\n respond_to do |format|\n format.html { redirect_to notifiers_url, notice: 'Notifier was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n\t\t@notification = Notification.find(params[:id])\r\n\t\tif current_user != nil\r\n\t\t\tAction.create(info: current_user.username + ' has deleted this notification: (' + @notification.info + ') belonging to ' + User.find(@notification.user_id).username + '.', user_email: current_user.email)\r\n\t\telse\r\n\t\t\tAction.create(info: 'A system has deleted this notification: (' + @notification.info + ') belonging to ' + User.find(@notification.user_id).username + '.', user_email: 'SystemAdmin')\r\n\t\tend\r\n\t\t@notification.destroy\r\n\t\tredirect_to :back\r\n\tend",
"def auto_delete_notices\n @notices.each { |obj|\n if obj.auto_delete && !obj.end_date.nil? && obj.end_date.past?\n obj.destroy\n end\n }\n end",
"def remove\n @issue.labels!(@issue.labels - grid)\n end"
] | [
"0.6260517",
"0.6110999",
"0.6046434",
"0.60085297",
"0.59338176",
"0.5880503",
"0.5869141",
"0.5804477",
"0.57785124",
"0.5757553",
"0.5727281",
"0.5698001",
"0.5684405",
"0.56784624",
"0.56572413",
"0.5634316",
"0.56001025",
"0.55715257",
"0.55238694",
"0.54960984",
"0.54583085",
"0.54218477",
"0.54082465",
"0.5402044",
"0.53899306",
"0.535832",
"0.53551006",
"0.5345124",
"0.53367895",
"0.53127974",
"0.5302019",
"0.52795523",
"0.5272519",
"0.5272519",
"0.5261044",
"0.52386194",
"0.5220151",
"0.5204974",
"0.5190712",
"0.5188517",
"0.5182786",
"0.5175978",
"0.51756525",
"0.5174634",
"0.5168742",
"0.51384133",
"0.5138038",
"0.513364",
"0.51297903",
"0.51249814",
"0.51191384",
"0.51160073",
"0.511175",
"0.5106021",
"0.51056784",
"0.510151",
"0.5087205",
"0.5082064",
"0.50775945",
"0.5075595",
"0.5069018",
"0.50655806",
"0.50649935",
"0.50648516",
"0.5058485",
"0.50508463",
"0.50474185",
"0.50460833",
"0.5045844",
"0.5040525",
"0.5031315",
"0.50307643",
"0.502888",
"0.5027085",
"0.5013245",
"0.5013133",
"0.5007888",
"0.5000293",
"0.49998844",
"0.49982458",
"0.49847496",
"0.49847496",
"0.49847496",
"0.49746075",
"0.49730858",
"0.49648523",
"0.49611947",
"0.49611947",
"0.49611947",
"0.49611947",
"0.49496207",
"0.49437857",
"0.4942856",
"0.4940327",
"0.49328467",
"0.4931326",
"0.49311778",
"0.4929353",
"0.49286845",
"0.49185196"
] | 0.66725373 | 0 |
Remove notifications from proof editors | def remove_proof_upvote_notifications
((@resource.versions.map{ |version| version.user }).uniq - [@actor]).each do |editor|
n = Notification.find_by(recipient: @resource.user, actor: @actor,
action_type: "like",
notifiable: @resource) if @resource.user != @actor
if !n.nil?
n.destroy
@removed += 1
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_problem_upvote_notifications\n ((@resource.versions.map{ |version| version.user }).uniq - [@actor]).each do |editor|\n n = Notification.find_by(recipient: editor,\n actor: @actor,\n notifiable: @resource,\n action_type: \"like\")\n if !n.nil?\n n.destroy\n @removed += 1\n end\n end\n\n end",
"def remove_from_notifications\n notifications.find_all { |n| n.noti_read == 'N' }.each do |n|\n remove_from_notification(n)\n end # each n\n end",
"def destroy_notifications\n Notification.where(notifiable_id: @lecture.id, notifiable_type: 'Lecture')\n .delete_all\n end",
"def send_deletion_notification\n @notifiable = self\n @tutor = User.find(self.user_id)\n @student = User.find(self.pupil_id)\n Notification.create(:user => @tutor, :receiver_id => @student.id, :message => @tutor.title + ' has removed you from their student list')\n end",
"def clear_notifications\n Notification.where(actor_id: id).destroy_all\n end",
"def fRemoveNotificationsFrom (email)\n @users.removeNotificationsFrom(email)\n end",
"def remove_comment_upvote_notifications\n n = Notification.find_by(recipient: @resource.user, actor: @actor, \n action_type: \"like\",\n notifiable: @resource) if @resource.user != @actor\n if !n.nil?\n n.destroy\n @removed += 1\n end\n end",
"def delete_notifications\n Notification.where(origin_type: 'Message', origin_id: @message.id).destroy_all\n end",
"def run_on_removal(paths)\n @builder.notify\n end",
"def after_destroy(post)\n post = post.to_post\n Notification.where(:scope => 'mention',\n :source_ids => {'Post' => post.id}).each do |notification|\n notification.remove_source(post)\n notification.update_actors\n end\n end",
"def exec_commande_notifications\n # On affiche toujours la liste des notifications\n clear\n notifications.wait_for_choix_and_execute\n end",
"def delete_notifier(name)\n @notifiers.delete(name) if defined? @notifiers\n end",
"def remove_notifier(notifier)\n @notifiers.delete(notifier)\n end",
"def release_notification\n @has_data = Settings.has_user_data\n @data = Settings.getSavedData\n \n #reset date. otherwise it's being shown all the time, once it's been set \n $choosed['1'] = nil\n end",
"def destroy\n @notification.destroy\n end",
"def delete_notifications\n @tutor = Tutor.find(params[:id])\n Notification.where(user_type: 'Tutor', user_id: @tutor.id).destroy_all\n end",
"def remove_from_notification (n)\n logger.debug2 \"comment id #{id}. Notification id #{n.id}. notification key #{n.noti_key}\" if debug_notifications\n # only modify unread notifications\n return unless n.noti_read == 'N'\n cn = notifications.where(\"notification_id = ?\", n.id).first\n logger.debug2 \"cn.class = #{cn.class}\" if debug_notifications\n logger.debug2 \"cn.id = #{cn.id}\" if cn and debug_notifications\n logger.debug2 \"cn.noti_key = #{cn.noti_key}\" if cn and debug_notifications\n logger.debug2 \"cn.from_user.short_user_name = #{cn.from_user.short_user_name}\" if cn and cn.from_user and debug_notifications\n logger.debug2 \"cn.to_user.short_user_name = #{cn.to_user.short_user_name}\" if cn and cn.to_user and debug_notifications\n # find no users before and after removing this comment from notification\n old_no_users = n.api_comments.collect { |c| c.user_id }.uniq.size\n new_users = n.api_comments.find_all { |ac| ac.id != id }.collect { |ac| ac.user }.uniq\n new_no_users = new_users.size\n if new_no_users == 0\n # last user for this unread notification has been removed\n logger.debug2 \"last user for this unread notification has been removed\" if debug_notifications\n n.destroy!\n return\n end\n return if old_no_users == new_no_users # unchanged number of users => unchanged notification\n if new_no_users > 3\n # unchanged noti_key and username array. Just change number of users\n logger.debug2 \"unchanged noti_key and username array. Just change number of users\" if debug_notifications\n notifications.delete(cn) if cn\n noti_options = n.noti_options\n noti_options[:no_users] = new_no_users\n noti_options[:no_other_users] = new_no_users - 2\n n.noti_options = noti_options\n n.save!\n return\n end\n # change noti_key, username array and number of users\n if n.noti_key !~ /^([a-z_]+)_(\\d)_v(\\d+)$/\n logger.debug2 \"invalid noti key format. noti key = #{noti_key}\"\n return\n end\n logger.debug2 \"change noti_key, username array and number of users\" if debug_notifications\n noti_key_prefix, noti_key_no_users, noti_key_version = $1, $2, $3\n noti_options = n.noti_options\n (1..3).each { |i| noti_options[\"username#{i}\".to_sym] = nil }\n usernames = new_users.collect { |u| u.short_user_name }\n 0.upto(usernames.size-1).each do |i|\n noti_options[\"username#{i+1}\".to_sym] = usernames[i]\n end\n noti_options[:no_users] = new_no_users\n noti_options[:no_other_users] = new_no_users - 2\n n.noti_key = \"#{noti_key_prefix}_#{new_no_users}_v#{noti_key_version}\"\n logger.debug2 \"noti_key: old = #{n.noti_key_was}, new = #{n.noti_key}\" if debug_notifications\n n.noti_options = noti_options\n notifications.delete(cn) if cn\n n.save!\n end",
"def notifications\n end",
"def remove_message_reaction(data); end",
"def index\n @notifications = Notification.where user_id: current_user.id, seen: nil\n @notifications = @notifications.sort { | x , y | x.seen? ? 1:0 <=> y.seen? ? 0:1}\n\n @notifications.each do |msg|\n msg.destroy\n end\n\n end",
"def down\n change_column_null :notifications, :notification_content, false\n end",
"def destroy\n \n \n #remove notification \n #remove plots \n\n\n if current_user.id==@tale.user_id\n notifications = Journal.where(notification_type: \"StoryCreate\", notification_id: @tale.id)\n notifications.each do|notification|\n notification.delete\n end \n\n\n @tale.destroy\n\n\n\n\n\n\n respond_to do |format|\n #format.html { redirect_to tales_url }\n format.html { redirect_to profiles_index_path(current_user.id) }\n format.json { head :no_content }\n end\n end\n\n\n end",
"def remove_notification\n if followable_type == \"User\"\n notice = Notification.where({\n target_id: followable_id,\n target_type: \"User\",\n notice_id: id,\n notice_type: \"Follow\",\n user_id: followable_id,\n notifier_id: follower_id\n }).first\n notice.blank? ? true : notice.destroy\n end\n end",
"def remove_footnotes(elements); end",
"def clear_notifier!\n self.notifier = nil\n self.root_notifier = nil\n end",
"def snmp_notification_receiver_remove(opts = {})\n cmd = 'no ' << snmp_notification_receiver_cmd(opts)\n configure cmd\n end",
"def remove_remaining_notification_settings\n source_project.notification_settings.destroy_all # rubocop: disable DestroyAll\n end",
"def skip_email_changed_notification!; end",
"def remove_all_message_reactions(data); end",
"def clean_notes\n for note in notes\n if note.comment.blank?\n note.destroy\n end\n end\n\n end",
"def destroy\n @admin_notification.destroy\n\n head :no_content\n end",
"def unpublish_revisions\n #Unpublish us\n if self.submitted?\n self.deleted!\n save\n end\n if self.revisions.present?\n #Unpublish the revisions\n self.revisions.each do |event|\n if event.submitted?\n event.deleted!\n event.save\n end\n end\n end\n end",
"def remove_all_confirmation_events\n ConfirmationEvent.find_each(&:destroy)\n end",
"def deleteEventNotifications(u, e)\n @ns = getEventNotifications(u, e)\n @ns.each do |n|\n n.destroy\n end\n if u != nil\n redirect_to \"/notifications\", notice: \"All notifications for \" + Event.find_by(id: e).name + \" deleted.\"\n end\n end",
"def notify\n reviewers = proposal.reviewers.reject{|r| r.id == user_id }\n Notification.create_for(reviewers, proposal: proposal, message: \"Internal comment on #{proposal.title}\")\n end",
"def remove\n \t@watcher.remove(@event, @prok)\n end",
"def clean_for_period(future_period, notification_times)\n future_period.notifications.all.each do |existing_notification|\n unless notification_times.include?(existing_notification.time.utc)\n existing_notification.delete\n end\n end\n end",
"def resource_context_destroy\n # only notify for customizations being deleted, not other kinds\n if !self.institution_id.nil? && self.requirement_id.nil? && self.resource_id.nil?\n institution = self.institution\n template = self.requirements_template\n users = institution.users_in_and_above_inst_in_role(Role::RESOURCE_EDITOR)\n users += institution.users_in_and_above_inst_in_role(Role::INSTITUTIONAL_ADMIN)\n users.uniq! #only the unique users, so each user is only listed once\n users.delete_if {|u| !u[:prefs][:resource_editors][:deleted] }\n users.each do |user|\n UsersMailer.notification(\n user.email,\n \"DMP Template Customization Deleted: #{template.name}\",\n \"resource_editors_deleted\",\n {:user => user, :customization => self} ).deliver\n end\n end\n end",
"def run_on_removal(paths)\n super\n end",
"def cleanup\n\t\t\tself.framework.events.remove_session_subscriber(self)\n\t\t\tremove_console_dispatcher('notify')\n\t\tend",
"def cleanup\n\t\t\tself.framework.events.remove_session_subscriber(self)\n\t\t\tremove_console_dispatcher('notify')\n\t\tend",
"def destroy\n #@event_event.destroy\n @event_event.deleted = true\n dest = @event_event.id\n type = 7 #event_notifications_code\n Notification.clear_notifications(type,dest)\n @event_event.save\n @event_event.user.remove_event\n respond_to do |format|\n format.html { redirect_to admin_event_events_url, notice: 'Event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def remove_marked\n @objects.remove_marked\n end",
"def remove_silencers!\n @silencers = []\n end",
"def remove_silencers!\n @silencers = []\n end",
"def remove_silencers!\n @silencers = []\n end",
"def strip_removed_issues!\n removed_issues.each { |issue| issue.update!(review_request: nil) }\n end",
"def remove_invalid_emails\n notification_emails.each do |email|\n if !email.valid?\n notification_emails.delete email\n end\n end\n end",
"def unmanage window\n if h=managed[window]\n h[:source_funcs].each do |sf|\n sf.remove()\n end\n \n managed.delete window\n end\n end",
"def skip_confirmation_notification!; end",
"def remove_all_publications\n add_actions 'RemovePublication()'\n end",
"def admin_remove_service_notification(service)\n @service = service\n subject = \"Service #{service.title} has been removed!!\"\n setup_email(ADMIN_EMAIL, subject, service.user.email)\n end",
"def deliver_security_notification_destroy\n deliver_security_notification(\n recipients: [address],\n message: :mail_body_security_notification_remove,\n field: :field_mail,\n value: address\n )\n end",
"def auto_delete_notices\n @notices.each { |obj|\n if obj.auto_delete && !obj.end_date.nil? && obj.end_date.past?\n obj.destroy\n end\n }\n end",
"def destroy\n @pic.destroy\n\n notifications = Notification.find(:all, :conditions => ['notification_type = ? and object_id = ?', \"comment\", @pic.id])\n notifications.each do |n|\n n.destroy\n end\n respond_to do |format|\n format.html { redirect_to(root_url) }\n format.xml { head :ok }\n end\n end",
"def remove_except_follower\n events=$PokemonGlobal.dependentEvents\n $PokemonGlobal.dependentEvents.each_with_index do |event,i|\n next if event[8][/FollowerPkmn/]\n events[i] = nil\n @realEvents[i] = nil\n @lastUpdate += 1\n end\n events.compact!\n @realEvents.compact!\n end",
"def delete_notification(principal_uri, notification)\n end",
"def run_on_removals(paths)\n paths.each do |path|\n warn \"file #{path} removed -- it's up to you to remove it from the server if desired\"\n end\n end",
"def destroy\n @notification.destroy\n head :no_content \n end",
"def remove_references(options)\n \t# just got to remove the assigned_pics Tree hash\n \tPictureandmeta.delete_event(self)\n end",
"def remove_message(name)\n\t\tend",
"def remove_notification_message parrent, user_id\n #ap 'NotificationModule#remove_notification_message'\n #ap user_id\n #ap parrent.id\n #ap parrent.class.name\n if notification_message = NotificationMessage.find_by(\n user_id: user_id,\n asset_id: parrent.id,\n asset_type: parrent.class.name\n )\n notification_message.destroy\n end\n end",
"def destroy\n authorize(Notification)\n if @notification.destroy\n msg = success_message(@notification, _('deleted'))\n redirect_to super_admin_notifications_path, notice: msg\n else\n flash.now[:alert] = failure_message(@notification, _('delete'))\n render :edit\n end\n end",
"def notations; end",
"def remove_pending\n authorize! :update, @user, :message => t('errors.messages.not_authorized_as_manager')\n\n @user = User.find(params[:id])\n @marina = Marina.find(params[:marina])\n @marina.pending_users.delete(@user)\n\n @user.marina_state= \"\"\n UserNotifier.remove_pending(@user).deliver\n @user.save\n @marina.save\n redirect_to marina_path(@marina), :notice => t('errors.messages.remove_pending')\n #\"Bertholder and marina are now connected. a notification email has been sent\"\n\n\n\n end",
"def ClearYesNoMessages\n @yesno_messages = []\n\n nil\n end",
"def remove\n args.each do |name|\n messages = nil\n action(\"Removing #{name} from #{app}\") do\n messages = addon_run { heroku.uninstall_addon(app, name, :confirm => options[:confirm]) }\n end\n output messages[:attachment] if messages[:attachment]\n output messages[:message]\n end\n end",
"def reset_notifier!\n notifier.reset!\n end",
"def remove_idea_events(events)\n events.delete_if(&:idea?)\nend",
"def destroy\n @detour = Detour.find(params[:id])\n @detour.notifications.each do |n|\n logger.info(\"Deleteing notification \" + n.id.to_s + \" for detour \" + @detour.id.to_s + \"\\n\")\n n.destroy\n end\n @detour.destroy\n\n respond_to do |format|\n format.html { redirect_to(detours_url) }\n format.xml { head :ok }\n end\n end",
"def unsubscribe_from_notifications(client, collection)\n mirror = client.discovered_api('mirror', 'v1')\n result = client.execute(\n :api_method => mirror.subscriptions.delete,\n :parameters => { 'id' => collection })\n if result.error?\n puts \"An error occurred: #{result.data['error']['message']}\"\n end\n end",
"def unconfirm_with_notify(another_user)\n return nil unless another_user.instance_of?(User) && @user.has_associated_swimmer? && another_user.has_associated_swimmer?\n\n result = UserSwimmerConfirmation.unconfirm_for(another_user, another_user.swimmer, @user)\n if result\n NewsFeed.create_social_feed(\n another_user.id,\n @user.id,\n I18n.t('newsfeed.unconfirm_title'),\n I18n.t('newsfeed.unconfirm_body').gsub('{SWIMMER_NAME}', another_user.swimmer.get_full_name)\n )\n # TODO: Block friendships also?\n # TODO Create also achievement accordingly\n end\n result\n end",
"def remove_watcher\n email = args.shift\n\n if delete_watcher_from_email(email)\n puts \"Removed #{email} as a watcher\"\n else\n puts \"No watchers with that email found in the watcher list\"\n end\n end",
"def ensure_deletion_fixes \n # TO DO\n end",
"def destroy\n #@admin_academy_question.destroy\n a = Academy::Question.find(params[:id].split('-')[0])\n a.update(:is_deleted => true)\n dest = a.id\n type = 4 #answer_question_code\n Notification.clear_notifications(type,dest)\n a.save\n\n respond_to do |format|\n format.html { redirect_to admin_academy_questions_url }\n format.json { head :no_content }\n end\n end",
"def listeners\n users_to_notify = Set.new\n users_to_notify += self.proposal.currently_awaiting_approvers\n users_to_notify += self.proposal.individual_steps.approved.map(&:user)\n users_to_notify += self.proposal.observers\n users_to_notify << self.proposal.requester\n # Creator of comment doesn't need to be notified\n users_to_notify.delete(self.user)\n users_to_notify\n end",
"def notify\n notify_unmentioned_reviewers\n notify_mentioned_event_staff if mention_names.any?\n end",
"def notify_unflagged(_cell); end",
"def unpublish_self\n if self.submitted?\n self.deleted!\n save\n end\n if self.revisions.present?\n self.revisions.each do |event|\n if event.submitted?\n event.deleted!\n event.save\n end\n end\n end\n end",
"def nullify_jabber_confirmation\n unconfirm_jabber if saved_change_to_attribute?(:jabber)\n end",
"def destroy\n @notification.destroy\n return_back_or_ajax\n end",
"def deleteUserNotifications(u)\n @ns = getUserNotifications(u)\n @ns.each do |n|\n n.destroy\n end\n redirect_to \"/notifications\", notice: \"All notifications deleted.\"\n end",
"def delete_notification(principal_uri, notification)\n @notifications[principal_uri].delete_if do |value|\n notification == value\n end\n end",
"def destroy_post_owner_notification_of_reply(reply)\n self.notifications.find_by_key(post_reply_notification_key(reply)).destroy rescue true\n end",
"def destroy\n @notification = Notification.find_by(id: params[:id])\n if(curr_user_has_notification(@notification.id))\n @notification.destroy\n end\n redirect_to notification_url\n end",
"def after_soft_delete; end",
"def free_resources\n\t\tunset_vim_event_hooks\n\tend",
"def set_notifications\n @notifications = Notification.all\n end",
"def remove_mentionables(mentioner)\n self.where(:mentioner_type => mentioner.class.name.classify).\n where(:mentioner_id => mentioner.id).destroy_all\n end",
"def run_on_removals(paths)\n Runner.remove(Inspector.clean(paths, :missing_ok => true), watchers, options)\n end",
"def clean!\n @changes = []\n end",
"def destroy_with_notifications\n errors.clear\n if self.new_record?\n errors.add(:base, :problem_destroying)\n return false\n end\n resp = false\n ActiveRecord::Base.transaction do\n DocumentsSlide.joins(:slide, {:slide => :lesson}).select('lessons.user_id AS my_user_id, lessons.title AS lesson_title, lessons.id AS lesson_id').group('lessons.id').where('documents_slides.document_id = ?', self.id).each do |ds|\n n_title = I18n.t('notifications.documents.destroyed.title')\n n_message = I18n.t('notifications.documents.destroyed.message', :document_title => self.title, :lesson_title => ds.lesson_title)\n n_basement = I18n.t('notifications.documents.destroyed.basement', :lesson_title => ds.lesson_title, :link => lesson_viewer_path(ds.lesson_id.to_i))\n if ds.my_user_id.to_i != self.user_id && !Notification.send_to(ds.my_user_id.to_i, n_title, n_message, n_basement)\n errors.add(:base, :problem_destroying)\n raise ActiveRecord::Rollback\n end\n Bookmark.where(:bookmarkable_type => 'Lesson', :bookmarkable_id => ds.lesson_id.to_i).each do |b|\n automatic_message = I18n.t('notifications.documents.standard_message_for_linked_lessons', :document_title => self.title)\n n_title = I18n.t('notifications.lessons.modified.title')\n n_message = I18n.t('notifications.lessons.modified.message', :lesson_title => ds.lesson_title, :message => automatic_message)\n n_basement = I18n.t('notifications.lessons.modified.basement', :lesson_title => ds.lesson_title, :link => lesson_viewer_path(ds.lesson_id.to_i))\n if !Notification.send_to(b.user_id, n_title, n_message, n_basement)\n errors.add(:base, :problem_destroying)\n raise ActiveRecord::Rollback\n end\n end\n end\n begin\n self.destroy\n rescue StandardError\n errors.add(:base, :problem_destroying)\n raise ActiveRecord::Rollback\n end\n resp = true\n end\n resp\n end",
"def grant!\n unless person.editors.include?(editor)\n person.editors << editor\n EditorRequestMailer.notification(self).deliver_now\n end\n destroy\n end",
"def remove\n return unless confirm_command\n\n args.each do |name|\n messages = nil\n if name.start_with? \"HEROKU_POSTGRESQL_\"\n name = name.chomp(\"_URL\").freeze\n end\n action(\"Removing #{name} on #{app}\") do\n messages = addon_run { heroku.uninstall_addon(app, name, :confirm => app) }\n end\n display(messages[:attachment]) if messages[:attachment]\n display(messages[:message]) if messages[:message]\n end\n end",
"def clear_patches\n @patches.clear\n end",
"def run_on_removals(paths)\n paths.each do |path|\n system \"theme remove #{path}\"\n end\n end",
"def RemoveObsoleteResolvables\n Builtins.y2milestone(\"--------- removing obsolete selections ---------\")\n\n # this removes only information about selections and applied patches\n # it doesn't remove any package\n Builtins.y2milestone(\n \"Removing all information about selections and patches in %1\",\n Installation.destdir\n )\n Pkg.TargetStoreRemove(Installation.destdir, :selection)\n\n # disabled by FATE #301990, bugzilla #238488\n # Pkg::TargetStoreRemove (Installation::destdir, `patch);\n\n Builtins.y2milestone(\"--------- removing obsolete selections ---------\")\n\n nil\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @notification.destroy\n respond_to do |format|\n format.html { redirect_to notifications_url }\n format.json { head :no_content }\n end\n end"
] | [
"0.6593376",
"0.6380433",
"0.6120637",
"0.6116147",
"0.61107093",
"0.6052392",
"0.5969748",
"0.59078586",
"0.5905529",
"0.58484566",
"0.58292305",
"0.5812028",
"0.5777078",
"0.56985176",
"0.5674592",
"0.5670423",
"0.56559753",
"0.5570358",
"0.5560994",
"0.55516726",
"0.5520275",
"0.54785156",
"0.5470494",
"0.5464708",
"0.54328036",
"0.5418419",
"0.54092014",
"0.5403503",
"0.5386417",
"0.5378536",
"0.53719884",
"0.5369494",
"0.53678566",
"0.53573996",
"0.534897",
"0.5346573",
"0.53396374",
"0.53263766",
"0.53262854",
"0.5326004",
"0.5326004",
"0.5289387",
"0.52881896",
"0.52725357",
"0.52725357",
"0.52725357",
"0.52689654",
"0.5268117",
"0.52624106",
"0.5261852",
"0.52611446",
"0.52447855",
"0.5243292",
"0.52394354",
"0.52216834",
"0.52133083",
"0.5208214",
"0.52005744",
"0.5191755",
"0.5187297",
"0.5177125",
"0.5170375",
"0.51613635",
"0.51574326",
"0.51537484",
"0.5145324",
"0.514052",
"0.51400524",
"0.5139379",
"0.5124236",
"0.5120561",
"0.5120085",
"0.511455",
"0.5111994",
"0.5111512",
"0.5110285",
"0.51087403",
"0.5106438",
"0.50936586",
"0.5091611",
"0.5087539",
"0.50873846",
"0.5080951",
"0.50789",
"0.5076561",
"0.50694877",
"0.5069273",
"0.5058513",
"0.5037437",
"0.5036154",
"0.5033819",
"0.50297904",
"0.5024117",
"0.50185704",
"0.501703",
"0.50101084",
"0.50065917",
"0.5006397",
"0.5006397",
"0.5006397"
] | 0.6894099 | 0 |
Remove notification from owner of comment | def remove_comment_upvote_notifications
n = Notification.find_by(recipient: @resource.user, actor: @actor,
action_type: "like",
notifiable: @resource) if @resource.user != @actor
if !n.nil?
n.destroy
@removed += 1
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_notification\n if followable_type == \"User\"\n notice = Notification.where({\n target_id: followable_id,\n target_type: \"User\",\n notice_id: id,\n notice_type: \"Follow\",\n user_id: followable_id,\n notifier_id: follower_id\n }).first\n notice.blank? ? true : notice.destroy\n end\n end",
"def remove_from_notification (n)\n logger.debug2 \"comment id #{id}. Notification id #{n.id}. notification key #{n.noti_key}\" if debug_notifications\n # only modify unread notifications\n return unless n.noti_read == 'N'\n cn = notifications.where(\"notification_id = ?\", n.id).first\n logger.debug2 \"cn.class = #{cn.class}\" if debug_notifications\n logger.debug2 \"cn.id = #{cn.id}\" if cn and debug_notifications\n logger.debug2 \"cn.noti_key = #{cn.noti_key}\" if cn and debug_notifications\n logger.debug2 \"cn.from_user.short_user_name = #{cn.from_user.short_user_name}\" if cn and cn.from_user and debug_notifications\n logger.debug2 \"cn.to_user.short_user_name = #{cn.to_user.short_user_name}\" if cn and cn.to_user and debug_notifications\n # find no users before and after removing this comment from notification\n old_no_users = n.api_comments.collect { |c| c.user_id }.uniq.size\n new_users = n.api_comments.find_all { |ac| ac.id != id }.collect { |ac| ac.user }.uniq\n new_no_users = new_users.size\n if new_no_users == 0\n # last user for this unread notification has been removed\n logger.debug2 \"last user for this unread notification has been removed\" if debug_notifications\n n.destroy!\n return\n end\n return if old_no_users == new_no_users # unchanged number of users => unchanged notification\n if new_no_users > 3\n # unchanged noti_key and username array. Just change number of users\n logger.debug2 \"unchanged noti_key and username array. Just change number of users\" if debug_notifications\n notifications.delete(cn) if cn\n noti_options = n.noti_options\n noti_options[:no_users] = new_no_users\n noti_options[:no_other_users] = new_no_users - 2\n n.noti_options = noti_options\n n.save!\n return\n end\n # change noti_key, username array and number of users\n if n.noti_key !~ /^([a-z_]+)_(\\d)_v(\\d+)$/\n logger.debug2 \"invalid noti key format. noti key = #{noti_key}\"\n return\n end\n logger.debug2 \"change noti_key, username array and number of users\" if debug_notifications\n noti_key_prefix, noti_key_no_users, noti_key_version = $1, $2, $3\n noti_options = n.noti_options\n (1..3).each { |i| noti_options[\"username#{i}\".to_sym] = nil }\n usernames = new_users.collect { |u| u.short_user_name }\n 0.upto(usernames.size-1).each do |i|\n noti_options[\"username#{i+1}\".to_sym] = usernames[i]\n end\n noti_options[:no_users] = new_no_users\n noti_options[:no_other_users] = new_no_users - 2\n n.noti_key = \"#{noti_key_prefix}_#{new_no_users}_v#{noti_key_version}\"\n logger.debug2 \"noti_key: old = #{n.noti_key_was}, new = #{n.noti_key}\" if debug_notifications\n n.noti_options = noti_options\n notifications.delete(cn) if cn\n n.save!\n end",
"def destroy_post_owner_notification_of_reply(reply)\n self.notifications.find_by_key(post_reply_notification_key(reply)).destroy rescue true\n end",
"def remove_notification_message parrent, user_id\n #ap 'NotificationModule#remove_notification_message'\n #ap user_id\n #ap parrent.id\n #ap parrent.class.name\n if notification_message = NotificationMessage.find_by(\n user_id: user_id,\n asset_id: parrent.id,\n asset_type: parrent.class.name\n )\n notification_message.destroy\n end\n end",
"def send_deletion_notification\n @notifiable = self\n @tutor = User.find(self.user_id)\n @student = User.find(self.pupil_id)\n Notification.create(:user => @tutor, :receiver_id => @student.id, :message => @tutor.title + ' has removed you from their student list')\n end",
"def destroy_post_owner_notification_of_like(like)\n self.notifications.find_by_key(post_like_notification_key(like)).destroy rescue true\n end",
"def delete_notification(principal_uri, notification)\n end",
"def fRemoveNotificationsFrom (email)\n @users.removeNotificationsFrom(email)\n end",
"def delete_notification(principal_uri, notification)\n @notifications[principal_uri].delete_if do |value|\n notification == value\n end\n end",
"def unsubscribe_from_comments user\n subscription = find_or_build_comment_subscription user\n subscription.subscribed = false\n subscription.save!\n end",
"def after_destroy(post)\n post = post.to_post\n Notification.where(:scope => 'mention',\n :source_ids => {'Post' => post.id}).each do |notification|\n notification.remove_source(post)\n notification.update_actors\n end\n end",
"def remove_proof_upvote_notifications\n ((@resource.versions.map{ |version| version.user }).uniq - [@actor]).each do |editor|\n n = Notification.find_by(recipient: @resource.user, actor: @actor, \n action_type: \"like\", \n notifiable: @resource) if @resource.user != @actor\n if !n.nil?\n n.destroy\n @removed += 1\n end\n end\n end",
"def remove_with_notify(swimming_buddy)\n if @user.remove_friendship(swimming_buddy)\n NewsFeed.create_social_feed(\n @user.id,\n swimming_buddy.id,\n I18n.t('newsfeed.remove_title'),\n I18n.t('newsfeed.remove_body').gsub('{SWIMMER_NAME}', swimming_buddy.get_full_name)\n )\n # TODO: Create also achievement row accordingly?\n end\n end",
"def destroy\n @notification = Notification.find_by(id: params[:id])\n if(curr_user_has_notification(@notification.id))\n @notification.destroy\n end\n redirect_to notification_url\n end",
"def clear_notifications\n Notification.where(actor_id: id).destroy_all\n end",
"def remove_problem_upvote_notifications\n ((@resource.versions.map{ |version| version.user }).uniq - [@actor]).each do |editor|\n n = Notification.find_by(recipient: editor,\n actor: @actor,\n notifiable: @resource,\n action_type: \"like\")\n if !n.nil?\n n.destroy\n @removed += 1\n end\n end\n\n end",
"def delete_comment!(permlink); delete_comment(permlink).broadcast!(true); end",
"def destroy\n @notification.destroy\n end",
"def remove_notification\n \n @ac = AttachContact.find(params[:user_id])\n RecentActivity.create(:transaction_id => @transaction.id, \n :user_id => current_user.id, \n :message => \"Removed Contact: #{@ac.user.fullname} as #{@ac.role.name if @ac.role}\")\n \n render :nothing => true\n end",
"def del_comment\n @comment = ''\n end",
"def destroy\n @comment_id = params[:commenter]\n if @comment_id.eql? current_user.id.to_s\n @comment.destroy\n redirect_to @issue_path, notice: 'Comment is succesfully destroyed.'\n else\n redirect_to @issue_path, notice: \"This comment wasn't created by you\"\n end\n end",
"def admin_remove_service_notification(service)\n @service = service\n subject = \"Service #{service.title} has been removed!!\"\n setup_email(ADMIN_EMAIL, subject, service.user.email)\n end",
"def delete_notifications\n Notification.where(origin_type: 'Message', origin_id: @message.id).destroy_all\n end",
"def destroy\r\n\t\t@notification = Notification.find(params[:id])\r\n\t\tif current_user != nil\r\n\t\t\tAction.create(info: current_user.username + ' has deleted this notification: (' + @notification.info + ') belonging to ' + User.find(@notification.user_id).username + '.', user_email: current_user.email)\r\n\t\telse\r\n\t\t\tAction.create(info: 'A system has deleted this notification: (' + @notification.info + ') belonging to ' + User.find(@notification.user_id).username + '.', user_email: 'SystemAdmin')\r\n\t\tend\r\n\t\t@notification.destroy\r\n\t\tredirect_to :back\r\n\tend",
"def remove_comment(xid,comment_id)\n post('facebook.comments.remove', :xid=>xid, :comment_id =>comment_id)\n end",
"def delete_comment user\n edit_comment user, nil\n end",
"def destroy_comment\n get_comment\n @pcp_item = @pcp_comment.pcp_item\n @pcp_subject = @pcp_item.pcp_subject\n if @pcp_subject.user_is_owner_or_deputy?( current_user, @pcp_comment.pcp_step.acting_group_index )\n if @pcp_comment.published?\n notice = 'pcp_comments.msg.cannot_del'\n else\n @pcp_comment.transction do\n @pcp_comment.destroy\n @pcp_item.update_new_assmt( nil )\n end\n notice = 'pcp_comments.msg.delete_ok'\n end\n respond_to do |format|\n format.html { redirect_to pcp_item_path( @pcp_item ), notice: t( notice )}\n end\n else\n render_no_permission\n end\n end",
"def destroy\n\t\t@comment = Comment.find( params[:id] )\n\t\t\n\t\tif @comment.commentable.class.comments_extension.can_remove?( @comment.commentable, @comment, current_user_get )\t\t\n\t\t\t@removed_ids = [ @comment.id ]\n\t\t\t\n\t\t\tfor comment in @comment.descendants do\n\t\t\t\t@removed_ids << comment.id\n\t\t\tend\n\t\t\t\n\t\t\t@comment.destroy\n\t\t\t\n\t\t\trespond_to do |format|\n\t\t\t\tformat.html { redirect_to comments_url }\n\t\t\t\tformat.xml { head :ok }\n\t\t\t\tformat.js\n\t\t\tend\n\t\telse\n\t\t\trespond_to do |format|\n\t\t\t\tformat.html { redirect_to comments_url }\n\t\t\t\tformat.xml { head :err }\n\t\t\t\tformat.js\t{ render :update do |page| page.alert \"You can't remove this comment\" end }\n\t\t\tend\n\t\tend\n\tend",
"def remove_pending\n authorize! :update, @user, :message => t('errors.messages.not_authorized_as_manager')\n\n @user = User.find(params[:id])\n @marina = Marina.find(params[:marina])\n @marina.pending_users.delete(@user)\n\n @user.marina_state= \"\"\n UserNotifier.remove_pending(@user).deliver\n @user.save\n @marina.save\n redirect_to marina_path(@marina), :notice => t('errors.messages.remove_pending')\n #\"Bertholder and marina are now connected. a notification email has been sent\"\n\n\n\n end",
"def destroy\n @pic.destroy\n\n notifications = Notification.find(:all, :conditions => ['notification_type = ? and object_id = ?', \"comment\", @pic.id])\n notifications.each do |n|\n n.destroy\n end\n respond_to do |format|\n format.html { redirect_to(root_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n client = current_user.find_client(params[:client_id])\n @notification = client.notifications.find(params[:id])\n @notification.destroy\n\n respond_to do |format|\n format.html { redirect_to client_notifications_url(client) }\n end\n end",
"def destroy\n authorize(Notification)\n if @notification.destroy\n msg = success_message(@notification, _('deleted'))\n redirect_to super_admin_notifications_path, notice: msg\n else\n flash.now[:alert] = failure_message(@notification, _('delete'))\n render :edit\n end\n end",
"def clean_pending_item_notifications!(owner_user_ids)\n owner_user_ids.each do|owner_user_id|\n pending_item_count = ::Item.pending.where(user_id: owner_user_id).count\n logger.info \" | user #{owner_user_id} has #{pending_item_count} pending items\"\n if pending_item_count < 1\n ::Users::Notification.where(sender_user_id: owner_user_id, related_model_type: 'Child',\n related_model_id: owner_user_id, title: ::Item::PENDING_ITEM_TITLE).\n destroy_all\n end\n end\n end",
"def remove_comment\n\t if current_usertype.account_id == params[:account_id].to_i || current_admin\n \t\tcomment = Comment.find(params[:id])\n \t\tcomment.destroy\n \t\tflash[:notice] = \"Comment removed\"\n \t\tredirect_to :controller => :projects, :action => :comments_show, :id => params[:project_id]\n\t else\n\t flash[:error] = \"You do not have access to this page\"\n\t redirect_to root_url\n\t end\n\tend",
"def removed(member)\n return unless member.idea.owner # => prevents sending when idea is deleted\n\n @member = member\n mail(:to => member.user.email, :subject => \"You have been removed from the \\\"#{member.idea.name}\\\" idea\", :tag => 'member-removed') \n end",
"def delete_comments\n end",
"def remove_message_reaction(data); end",
"def destroy\n \n \n #remove notification \n #remove plots \n\n\n if current_user.id==@tale.user_id\n notifications = Journal.where(notification_type: \"StoryCreate\", notification_id: @tale.id)\n notifications.each do|notification|\n notification.delete\n end \n\n\n @tale.destroy\n\n\n\n\n\n\n respond_to do |format|\n #format.html { redirect_to tales_url }\n format.html { redirect_to profiles_index_path(current_user.id) }\n format.json { head :no_content }\n end\n end\n\n\n end",
"def unconfirm_with_notify(another_user)\n return nil unless another_user.instance_of?(User) && @user.has_associated_swimmer? && another_user.has_associated_swimmer?\n\n result = UserSwimmerConfirmation.unconfirm_for(another_user, another_user.swimmer, @user)\n if result\n NewsFeed.create_social_feed(\n another_user.id,\n @user.id,\n I18n.t('newsfeed.unconfirm_title'),\n I18n.t('newsfeed.unconfirm_body').gsub('{SWIMMER_NAME}', another_user.swimmer.get_full_name)\n )\n # TODO: Block friendships also?\n # TODO Create also achievement accordingly\n end\n result\n end",
"def destroy\n @notification = current_user.notifications.find(params[:id])\n @notification.destroy\n\n respond_to do |format|\n format.html { redirect_to(notifications_url) }\n format.xml { head :ok }\n end\n end",
"def remove_posts usr\n @conv.posts.each do |post|\n if usr.id == post.user_id\n post.status = 'removed'\n elsif usr.id == post.recipient_id\n post.recipient_status = 'removed'\n end\n post.save\n end\n end",
"def destroy\n @admin_notification.destroy\n\n head :no_content\n end",
"def delete_notifications\n @tutor = Tutor.find(params[:id])\n Notification.where(user_type: 'Tutor', user_id: @tutor.id).destroy_all\n end",
"def create_post_owner_notification_of_reply(reply)\n return if reply.user.id == self.user.id # don't notify user of his own replies..\n if self.user.role == \"Poster\"\n url = \"wall_expert/#{self.id}\"\n else\n url = \"wall/#{self.id}\"\n end\n notify(self.user, \"Your post was commented on!\", \"#{reply.user.profile.full_name} commented on your post!\", :from => reply.user, :key => post_reply_notification_key(reply), :link => '#{url}?reply=#{reply.id}')\n end",
"def remove_from_notifications\n notifications.find_all { |n| n.noti_read == 'N' }.each do |n|\n remove_from_notification(n)\n end # each n\n end",
"def remove_comment repo, comment_id\n response = @@connection.delete do | request |\n request.url \"repos/#{repo}/issues/comments/#{comment_id}\"\n request.headers['Authorization'] = \"token #{@@token}\"\n end\n \n # look for Status: 204 No Content\n return if response.env[:status] != 204\n \n # Comment successfully deleted from GitHub so remove from comment.db\n @@db.execute \"delete from comment where id == #{comment_id}\"\n end",
"def notify\n reviewers = proposal.reviewers.reject{|r| r.id == user_id }\n Notification.create_for(reviewers, proposal: proposal, message: \"Internal comment on #{proposal.title}\")\n end",
"def remove_notifier(notifier)\n @notifiers.delete(notifier)\n end",
"def remove_message(name)\n\t\tend",
"def destroy\n #@event_event.destroy\n @event_event.deleted = true\n dest = @event_event.id\n type = 7 #event_notifications_code\n Notification.clear_notifications(type,dest)\n @event_event.save\n @event_event.user.remove_event\n respond_to do |format|\n format.html { redirect_to admin_event_events_url, notice: 'Event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy_comments\n Comment.where(user_id: id).destroy_all\n end",
"def reply_to_parent(workitem)\n\n @context.tracker.remove_tracker(h.fei)\n\n super(workitem)\n end",
"def deleteUserNotifications(u)\n @ns = getUserNotifications(u)\n @ns.each do |n|\n n.destroy\n end\n redirect_to \"/notifications\", notice: \"All notifications deleted.\"\n end",
"def destroy\n if @notification.destroy\n send_success_json(@notification.id, {:msg => \"deleted\"})\n else\n send_error_json(@notification.id, \"delete_error\", 400)\n end\n end",
"def destroy\n @notification.destroy\n head :no_content \n end",
"def destroy\n @comment = @posting.comments.find(params[:id])\n @comment.destroy\n\n respond_to do |format|\n #@comment.create_activity :destroy, owner: current_user\n format.html { redirect_to root_url }\n format.json { head :no_content }\n end\n end",
"def send_email_to_owner\n return if !self.support_case or self.support_case.contact == self.created_by\n if AppSetting.config('Send Comment Notifications') == AppSetting::SEND_COMMENT_NOTIFICATIONS[:send_notification]\n SfcontactMailer.deliver_owner_notification(self)\n end\n rescue\n logger.info(\"Error occured creating case comment notification email: #{$!.message}\")\n end",
"def delete_observer o\n @sub_lock.synchronize do\n subscribers.fetch(channel, {}).delete o\n end\n end",
"def remove_observer(o)\n @cpg.remove_observer(o)\n end",
"def deleteEventNotifications(u, e)\n @ns = getEventNotifications(u, e)\n @ns.each do |n|\n n.destroy\n end\n if u != nil\n redirect_to \"/notifications\", notice: \"All notifications for \" + Event.find_by(id: e).name + \" deleted.\"\n end\n end",
"def unowner(jid,reason=nil,&blk)\n set_affiliation 'admin', jid, reason, &blk\n end",
"def destroy_with_post_changes\n destroy_without_post_changes\n\n if params['object_type'] == \"issue\"\n user = User.find(params[:user_id])\n issue = Issue.find(params[:object_id])\n post_comment(issue, \"Watchers removed: #{user.name}\")\n end\n end",
"def destroy\n @comment.destroy\n end",
"def came_comment\n @user = @receiver\n path = \"/#{@comment.commentable_type.downcase.pluralize}/#{@comment.commentable.id}\"\n @notification = @user.notifications.find_by(path: path)\n mail to: @user.email, subject: \"[bootcamp] #{@message}\"\n end",
"def delete_notification!(id: nil)\n add_to_task_queue do\n notification_data_source.delete_notification!(id: id)\n end\n end",
"def trash_comment(comment)\n trash_recording(comment)\n end",
"def comment_notification(user_id, comment_id)\n user = User.find(user_id)\n @comment = Comment.find(comment_id)\n mail(\n :to => user.email,\n :subject => \"[#{ArchiveConfig.APP_NAME}] Comment on \" + @comment.ultimate_parent.commentable_name.gsub(\">\", \">\").gsub(\"<\", \"<\")\n )\n end",
"def edited_comment_notification(user_id, comment_id)\n user = User.find(user_id)\n @comment = Comment.find(comment_id)\n mail(\n :to => user.email,\n :subject => \"[#{ArchiveConfig.APP_NAME}] Edited comment on \" + @comment.ultimate_parent.commentable_name.gsub(\">\", \">\").gsub(\"<\", \"<\")\n )\n end",
"def delete\n @notification = Users::Notification.find_by_id(params[:id])\n @notification.set_status_deleted\n\n respond_to do |format|\n format.html { redirect_to notification_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n UserAudit.create({:user => current_user, :action => \"destroyed toil request\", :end_user => @toil_request.user.email})\n @comment = toil_request_params[:comment]\n UserMailer.reject_toil(@toil_request.user, @toil_request, @comment, current_user.email).deliver\n @toil_request.destroy\n respond_to do |format|\n format.html { redirect_to toil_requests_url }\n format.json { head :no_content }\n end\n end",
"def deliver_security_notification_destroy\n deliver_security_notification(\n recipients: [address],\n message: :mail_body_security_notification_remove,\n field: :field_mail,\n value: address\n )\n end",
"def destroy\n @mailing = Mailing.find(params[:id])\n\n if !@mailing.comments.blank?\n @mailing.comments.each do |comments| \n if comments.user_id == @mailing.user_id \n @mailing.destroy\n redirect_to root_url, notice: 'Mailing deletado com sucesso!' \n else \n flash.now[:errors] = 'Você precisa adicionar um novo comentário antes de Excluir este Mailing!!!'\n @comment = Comment.new\n end\n end\n else\n flash.now[:errors] = 'Você precisa adicionar um novo comentário antes de Excluir este Mailing!!!'\n @comment = Comment.new\n end\n end",
"def delete_notification\n\n @greeting = \"Hi\"\n\n mail to: \"seekle.info@gmail.com\",subject:\"ユーザー退会した!\"\n \nend",
"def destroy\n @notification = Notification.find(params[:id])\n @notification.destroy\n\n respond_to do |format|\n format.html { redirect_to notifications_url }\n end\n end",
"def destroy\n @comment = Comment.find(params[:id])\n @message = @comment.message\n @comment.destroy if current_user == @comment.user\n\n respond_to do |format|\n format.html { redirect_to(@message) }\n format.js do\n render :update do |page|\n page.remove \"comment_#{params[:id]}\"\n end\n end\n format.xml { head :ok }\n end\n end",
"def collapse_comment_on_comment(id)\n unless session[:shown_reply_to_comment].nil? ||\n\tsession[:shown_reply_to_comment].empty?\n session[:shown_reply_to_comment].delete(id) if session[:shown_reply_to_comment].include? id\n end\n end",
"def destroy\n @notification.destroy\n return_back_or_ajax\n end",
"def delete_comment(id)\n record \"/msg/delete_comment/#{id}\"\n end",
"def unassigned_agenda_item(agenda_item, old_owner, author)\n Notifications.agenda_item_unassigned(agenda_item, old_owner, author).deliver_later\n end",
"def unsubscribe_from_notifications(client, collection)\n mirror = client.discovered_api('mirror', 'v1')\n result = client.execute(\n :api_method => mirror.subscriptions.delete,\n :parameters => { 'id' => collection })\n if result.error?\n puts \"An error occurred: #{result.data['error']['message']}\"\n end\n end",
"def comment_sent_notification(comment_id)\n @comment = Comment.find(comment_id)\n @noreply = true # don't give reply link to your own comment\n mail(\n :to => @comment.comment_owner_email,\n :subject => \"[#{ArchiveConfig.APP_NAME}] Comment you left on \" + @comment.ultimate_parent.commentable_name.gsub(\">\", \">\").gsub(\"<\", \"<\")\n )\n end",
"def remove_badge!\n self.badge_token = nil\n self.badge_token_set_at = nil\n self.save!\n end",
"def remove(guest)\n remove_dishes(guest)\n guests.delete(guest)\n guest.attending_events.delete(self)\n end",
"def clear_comment\n @comment = nil\n end",
"def before_destroy(work)\n if work.posted?\n users = work.pseuds.collect(&:user).uniq\n orphan_account = User.orphan_account\n unless users.blank?\n for user in users\n unless user == orphan_account\n # this has to use the synchronous version because the work is going to be destroyed\n UserMailer.delete_work_notification(user, work).deliver! \n end\n end\n end\n end\n end",
"def destroy\n hack = @comment.hack\n @comment.destroy\n redirect_to hack_path(hack.id)\n end",
"def destroy\n @notification_restriction = NotificationRestriction.find(params[:id])\n @notification_restriction.destroy\n\n respond_to do |format|\n format.html { redirect_to notification_restrictions_url }\n format.json { head :ok }\n end\n end",
"def comment_replied_to(options = {})\n send_comment_notification(options, \"reply_notice\")\n end",
"def delete_comment(permlink)\n @operations << {\n type: :delete_comment,\n author: account_name,\n permlink: permlink\n }\n \n self\n end",
"def send_delete_event_notification(event, user)\n if event.notify?\n if user.use_email? && user.confirmed? && event.use_email?\n UserMailer.event_delete_email(user, event).deliver rescue user\n end\n if user.use_text? && user.confirmed_text? && event.use_text?\n success, error = TwilioHandler.new.send_text(user, t('texts.deleted_event', params: get_event_text_params(event, false), type: event.eventType.capitalize))\n end\n if user.use_call? && user.confirmed_call? && event.use_call?\n success, error = TwilioHandler.new.send_call(user, t('texts.deleted_event', params: get_event_text_params(event, false), type: event.eventType.capitalize))\n end\n end\n end",
"def destroy\n @comment = @commentable_collection.find(params[:id])\n if @comment.user == current_user\n @comment.destroy\n respond_to do |format|\n format.html { redirect_to(posts_url, :notice => \"Your #{@comment.class.to_s.downcase} has been deleted\") }\n format.xml { head :ok }\n end\n else\n respond_to do |format|\n format.html { redirect_to(posts_url, :notice => \"You can't delete someone else's #{@comment.class.to_s.downcase}\") }\n end\n end\n end",
"def update_owner(user)\n self.notify = true if self.owner != user\n self.owner = user unless user.nil?\n self.save\n end",
"def delete_notifier(name)\n @notifiers.delete(name) if defined? @notifiers\n end",
"def destroy\n @user = User.current_user\n @comment = Comment.find(params[:id])\n \n if @comic.created_by == @user || @comment.created_by == @user\n @comment.destroy\n \n respond_to do |format|\n format.html { \n flash[:notice] = 'Comment was successfully deleted.'\n redirect_to comic_path(:id => @comic.alias, :page => @comment.page.locate) }\n format.xml { head :ok }\n end\n else\n flash[:notice] = \"You can't do that.\"\n respond_to do |format|\n format.html { redirect_to comic_path(:id => @comic.alias, :page => @comment.page.locate) }\n format.xml { head :ok }\n end\n end\n end",
"def declines!(submission)\n collaboration = collaborations.where(submission: submission).first\n # Remove the associated notification.\n notifications.where(notifyable_type: 'Collaboration', notifyable_id: collaboration.id).first.destroy\n collaboration.destroy\n end",
"def delete_Notification\n dest = Notification.where(id: params[:idNotification]).first\n if (dest) \n Notification.where(id: params[:idNotification]).destroy_all\n render json: { status: 'SUCCESS', message: 'ELIMINACION EXITOSA'}, status: :ok\n else\n render json: { status: 'INVALID', message: 'NOTIFICACION NO ENCONTRADA'}, status: :unauthorized\n end\n end",
"def destroy\n UserAudit.create({:user => current_user, :action => \"destroyed toil spend request\", :end_user => @spend_toil.user.email})\n @comment = spend_toil_params[:comment]\n @spend_toil.destroy\n UserMailer.reject_toil_spend(@spend_toil.user, @spend_toil, @comment, current_user).deliver\n respond_to do |format|\n format.html { redirect_to spend_toils_url }\n format.json { head :no_content }\n end\n end",
"def came_comment\n @user = @receiver\n link = \"/#{@comment.commentable_type.downcase.pluralize}/#{@comment.commentable.id}\"\n @notification = @user.notifications.find_by(link: link) || @user.notifications.find_by(link: \"#{link}#latest-comment\")\n mail to: @user.email, subject: \"[FBC] #{@message}\"\n end",
"def remove_posts usr\n ConversationProcessor.new(self).remove_posts usr\n end",
"def index\n @notifications = Notification.where user_id: current_user.id, seen: nil\n @notifications = @notifications.sort { | x , y | x.seen? ? 1:0 <=> y.seen? ? 0:1}\n\n @notifications.each do |msg|\n msg.destroy\n end\n\n end"
] | [
"0.6999875",
"0.69937617",
"0.6849901",
"0.67677766",
"0.6663127",
"0.659714",
"0.6530698",
"0.6475304",
"0.63988656",
"0.6389776",
"0.63698304",
"0.6322537",
"0.6279924",
"0.6260112",
"0.6244098",
"0.6233276",
"0.6212999",
"0.61734164",
"0.613409",
"0.59765583",
"0.5971494",
"0.597046",
"0.595284",
"0.594025",
"0.5931571",
"0.59052163",
"0.58819056",
"0.58750576",
"0.5832598",
"0.5826324",
"0.5816412",
"0.5759997",
"0.5751917",
"0.5745317",
"0.57292855",
"0.5729242",
"0.572489",
"0.5716937",
"0.57163566",
"0.5714955",
"0.57129616",
"0.57125825",
"0.5709951",
"0.5691396",
"0.5678885",
"0.56681746",
"0.5664381",
"0.56470805",
"0.56284285",
"0.56251293",
"0.56228226",
"0.56177425",
"0.560865",
"0.5604621",
"0.5601045",
"0.5590955",
"0.5588493",
"0.5576826",
"0.55641675",
"0.5563111",
"0.55555457",
"0.5554556",
"0.5553938",
"0.55448186",
"0.554437",
"0.55420035",
"0.5533538",
"0.55300593",
"0.55236685",
"0.55211437",
"0.5517825",
"0.5510939",
"0.5491941",
"0.54831505",
"0.54830045",
"0.5480011",
"0.5473575",
"0.546964",
"0.5469129",
"0.5468355",
"0.54656273",
"0.5464907",
"0.5462835",
"0.54572487",
"0.54483366",
"0.544212",
"0.5437428",
"0.5436037",
"0.5435278",
"0.5430798",
"0.54272443",
"0.5427087",
"0.5424598",
"0.5414277",
"0.54136616",
"0.54096377",
"0.5407704",
"0.5406823",
"0.5388834",
"0.53887695"
] | 0.7223053 | 0 |
def set_of_instructions def initialize (instruction) end end | def do_instruction(x)
case x
when x = "L"
then turn_left
when x = "R"
then turn_right
when x = "M"
then move_forward
else
puts "This is not a valid instruction"
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(instructions)\n\t\t@operands = Array.new\n\t\t@operators = Array.new\n\t\t@errors = Array.new\n\t\t@instructions = instructions\n\tend",
"def initialize(inst_set, opcode, params=[])\n @instruction_set = inst_set\n @opcode = opcode\n @params = params\n end",
"def initialize (instructions, id)\n @id = id\n @instructions = instructions\n end",
"def instructions\n @instructions ||= InstructionCollection.new(self)\n end",
"def instructions\n \n end",
"def initialize(input_string:)\n @coordinator = InstructionsParser.parse(input_string: input_string)\n end",
"def instructions; raise NotImplementedError\n end",
"def instruction name, instruction\n end",
"def initialize(instructions = nil, balance: 50, &block)\n if block_given?\n @instructions = InstructionBuilder.new(&block).instructions\n else\n @instructions = instructions\n end\n\n @balance = balance\n @storage = Storage.new\n end",
"def initialize(parent, id, instructions)\n @parent = parent\n @id = id\n @target = (id == 0 ? 1 : 0)\n\n @registers = Array('a'..'z').zip(Array.new(26, 0)).to_h\n @registers['p'] = id\n\n @instructions = instructions.dup\n @ic = 0 # Instruction counter\n\n @waiting = false\n @receiver = nil\n end",
"def initialize_instruction_debug_table\n @instruction_debug_table = {}\n\n @@instructions.each do |i|\n @instruction_debug_table[i[:op]] = {\n :mnemonic => i[:mnemonic],\n :args => i[:args]\n }\n end\n end",
"def initialize\n\t\t@operands = []\n\tend",
"def initialize\n @instr_type = MACRO\n @sym_name = UNINITIALIZED\n @args = UNINITIALIZED\n \n @addr = UNINITIALIZED\n end",
"def initialize(operand, attributes, allbut, expr = nil, compiler = nil)\n super(expr, compiler)\n @operand = operand\n @attributes = attributes\n @allbut = allbut\n end",
"def initialize(name, *instructions)\n @name = name\n @instructions = build_instructions(instructions)\n @tags = []\n end",
"def initialize(operand, attribute, expr = nil, compiler = nil)\n super(expr, compiler)\n @operand = operand\n @attribute = attribute\n end",
"def instructions str = nil, &block\n return @instructions if str.nil? && !block_given?\n @instructions = str || block.call\n end",
"def instr_INST(instruction_stack)\n\t\tmodname = read_nl_string(instruction_stack)\n\t\tclsname = read_nl_string(instruction_stack)\n\t\targs = slice_to_mark()\n\t\t@stack.push({\"__init_arg\" => args, \"__module\" => modname, \"__class\" => clsname})\n\tend",
"def initialize(code)\n @CODE = code # The program\n @REG = Array.new(16, 0) # The Registers\n @IP = 0 # Instruction Pointer\n @CO = 0 # Current Opcode\n end",
"def initialize(literal)\n @literal = literal\n @lemmata = []\n end",
"def initialize check_performance: false\n @instructions = []\n @labels = {}\n @check_performance = check_performance\n @performance_warnings = []\n # A set of temp registers recorded as we see them (not at allocation time)\n @temp_registers = Set.new\n yield self if block_given?\n end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize()\n #@source = aSourceTemplate\n #@representation = compile(aSourceTemplate)\n end",
"def instructions(incoming_instructions)\n incoming_instructions.chars.map(&INSTRUCTION)\n end",
"def instructions(lines)\n lines.map do |l|\n parts = l.split(\" \")\n inst = []\n inst[INST_COMMAND] = parts[0]\n inst[INST_VALUE] = parts[1].to_i\n inst[INST_VISITED] = false\n\n inst\n end\nend",
"def initialize\n @instr_type = UNINITIALIZED\n @addr = UNINITIALIZED\n end",
"def instructions\n if read_inheritable_attribute(:instructions).nil?\n write_inheritable_attribute(:instructions, {})\n end\n read_inheritable_attribute(:instructions)\n end",
"def initialize(source); end",
"def initialize(p1,p2)\n ori_init\n push p1\n push p2\n self[0].id = 0\n self[0].mark_markup = $glob.x_markup\n self[1].id = 1\n self[1].mark_markup = $glob.o_markup \n @act_p = self[0]\n end",
"def initialize(str)\n @expression = str\n end",
"def define(instruction)\n @def_instr = instruction\n end",
"def initialize(markup,x,y)\n ori_init(nil,'')\n init_vars(markup,x,y)\n declare_signals\n end",
"def on(instruction, arguments, &bloc)\n @instructions[instruction] = [arguments, bloc]\n end",
"def add_instruction(*parts)\n @program.add Verneuil::Instruction.new(*parts)\n end",
"def initialize(operand, by, summarization, allbut, expr = nil, compiler = nil)\n super(expr, compiler)\n @operand = operand\n @by = by\n @summarization = summarization\n @allbut = allbut\n end",
"def initialize(*arguments)\n @arguments = arguments\n @operations = []\n end",
"def get_instructions\n @instructions = Instructions.new('./lib/instructions.txt')\n @instructions.create_parameters_from_input\n end",
"def initialize(opcode, types, operands, destination, branch_destination, branch_condition, length)\n @opcode = opcode\n @types = types\n @operands = operands\n @destination = destination\n @branch_to = branch_destination\n @branch_on = branch_condition\n @length = length\n end",
"def initialize(*args)\n super\n if index = @args.index(\"in\")\n @block_params = @args[0..index-1]\n @value = @args[index+1]\n else\n @block_params = []\n @value = @args.first\n end\n @switched = false\n @commands = NodeList.new(self)\n @else_commands = NodeList.new(self)\n @in_else = false\n end",
"def initialize(x, y) #Code Type M\n @x = x #Code Type M\n @y = y #Code Type A\n end",
"def initialize(str)\n @expression = str\n @infix_tree = nil\n @value = nil\n end",
"def initialize(bytecode)\n @instructions = bytecode.instructions\n @constants = bytecode.constants\n @stack = Array.new(STACK_SIZE)\n @sp = 0\n end",
"def constructor; end",
"def initialize(expression, block)\n @expression = expression\n @block = block\n end",
"def initialize(operand, as, expr = nil, compiler = nil)\n super(expr, compiler)\n @operand = operand\n @as = as\n end",
"def initialize(operands, expr = nil, compiler = nil)\n super(expr, compiler)\n @operands = operands\n end",
"def initialize\n\t\t@calculator = []\n\tend",
"def initialize(*) \n end",
"def initialize(*)\n super\n machine\n end",
"def initialize(controlable, happened, task, symbol)\n\t\t@controlable = controlable\n\t\t@happened = happened\n\t\t@task = task\n\t\t@symbol = symbol\n\t end",
"def initialize(code = T.unsafe(nil), options = T.unsafe(nil)); end",
"def initialize ( )\n @buffer = [] # буфер для символов входного потока\n @states = [] # массив с состояниями парсера, <ParserState> объекты\n @chain = [] # цепочка работающих состояний\n #\n # Машина состояний для метода classify.\n #\n @matchstate = {\n\t:state => 0,\n\t:index => 0\n }\n @parserstate = ''\n end",
"def initialize() end",
"def initialize( &block )\n\t\t@variables = Set.new\n\t\t@constants = Set.new\n\t\t@axiom = nil\n\t\t@rules = []\n\n\t\t@rules_as_hash = nil\n\n\t\tself.instance_eval( &block ) if block\n\tend",
"def instructions\n @instructions ||= STDIN.read.split(\"\\n\")\nend",
"def initialize()\n self.kind = CIAT::Processors::Interpreter.new\n self.description = \"Parrot virtual machine\"\n self.libraries = []\n yield self if block_given?\n end",
"def initialize\n @binary_input = nil\n @current_token = nil\n @debug = false\n @s = nil\n @tokens = []\n end",
"def initialize command_string\n @cmds = parse_command_string( command_string.to_s )\n end",
"def initialize\r\n @code = ''\r\n @tags = []\r\n end",
"def activation_instructions\n end",
"def add_instruction(instruction)\n raise ArgumentError, \"Instruction must be of type 'Instruction'\" unless instruction.is_a?(Instruction)\n @instructions << instruction\n end",
"def initialize(operator, less)\n @operator = operator \n @less = less\n end",
"def initialize\n\t\t@tokenizer = Lexer.new # why the defined method is initialize and the called method is new mystifies me\n\t\t@token = nil\n\t\t@blocklevel = 0\n\t\t@node = nil\n\t\t@sav = nil\n\tend",
"def initialize(commands)\n @cmds = commands\n end",
"def initialize(*)\n super\n @operand = optimize_operand\n end",
"def initialize\n @set = BitSet.new\n @endOf = Sym::NoSym\n @ctx = false\n @correct = true\n end",
"def initialize\n @scripts = enumerate_scripts\n end",
"def initialize(code)\n @code = code\n \n IceNine.deep_freeze(self)\n end",
"def initialize(memory = [], debug_mode = false)\n @memory = memory.clone\n @ip = 0\n @modes = []\n @opcode = 0\n @input = []\n @output = []\n @base = 0\n @debug_mode = debug_mode\n @status = STATUS_RUN\n end",
"def initialize(*); end",
"def initialize(symbol: raise, offset: raise, pos: raise, source: raise)\n @symbol, @offset, @pos, @source = symbol, offset, pos, source\n @target = source.slice!(2,2)\n end",
"def initialize(step)\n @innerCode = ''\n @params_line = step[0][step[0].index('/')+1..step[0].rindex('/')-1]\n if step[0].include? \"|\"\n parametersLine = step[0][step[0].index('|')+1..step[0].rindex('|')-1]\n end\n @parameters = pars_parameters parametersLine\n @numberOfArguments = @parameters.size\n @feature_lines = Array.new\n end",
"def initialize(a, b) end",
"def initialize ast\n @ast = ast\n end",
"def initialize(state); end",
"def initialize(specifications = T.unsafe(nil)); end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize(tokens, verbose)\n @tokens = tokens\n @verbose = verbose\n @du = []\n @stringToEval = \"\"\n @bcounter = 0\n end",
"def initialize\n super\n\n @global_code = []\n @control_code = []\n @control_string = ''\n end",
"def initialize *a, &b\n @version = nil\n @leftovers = []\n @specs = {}\n @long = {}\n @short = {}\n @order = []\n @constraints = []\n @stop_words = []\n @stop_on_unknown = false\n\n #instance_eval(&b) if b # can't take arguments\n cloaker(&b).bind(self).call(*a) if b\n end",
"def initialize(*)\n end",
"def initialize(*)\n end",
"def initialize(promotion, base_code, num_codes)\n @base_code = base_code\n @num_codes = num_codes\n @promotion = promotion\n end",
"def initialize\n @instr_type = LABEL_GOTO\n @label_addr = UNINITIALIZED\n end",
"def initialize(p0=\"\") end",
"def initialize(operand, defaults, expr = nil, compiler = nil)\n super(expr, compiler)\n @operand = operand\n @defaults = defaults\n end",
"def processing_instruction(name, content); end"
] | [
"0.7554903",
"0.73453623",
"0.7246319",
"0.6981637",
"0.6927867",
"0.67096573",
"0.6660172",
"0.6649419",
"0.6593684",
"0.6496055",
"0.64936775",
"0.6469679",
"0.64469993",
"0.6397139",
"0.6390382",
"0.6352654",
"0.62854266",
"0.62737554",
"0.62690496",
"0.62635577",
"0.62416315",
"0.62371176",
"0.62371176",
"0.62371176",
"0.62371176",
"0.62371176",
"0.62321883",
"0.6200125",
"0.61726403",
"0.6169932",
"0.61695886",
"0.61688185",
"0.61672604",
"0.61627185",
"0.6135476",
"0.61021364",
"0.61001676",
"0.6096833",
"0.6094298",
"0.60917753",
"0.60858953",
"0.6081796",
"0.60645014",
"0.6062653",
"0.60524344",
"0.60493165",
"0.6039897",
"0.60298896",
"0.602531",
"0.5990081",
"0.598419",
"0.59641755",
"0.59494454",
"0.59456766",
"0.59391665",
"0.59226525",
"0.5913791",
"0.5900706",
"0.5900299",
"0.58855987",
"0.58608437",
"0.5858348",
"0.5855187",
"0.585386",
"0.5848946",
"0.5848926",
"0.58451766",
"0.5839723",
"0.58265924",
"0.58201617",
"0.5816449",
"0.58142656",
"0.58018243",
"0.5796012",
"0.5792889",
"0.5791325",
"0.5775168",
"0.5770182",
"0.5769917",
"0.57687956",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759708",
"0.5759652",
"0.57575953",
"0.5756967",
"0.5750973",
"0.5750973",
"0.5742177",
"0.5741013",
"0.57375985",
"0.57374114",
"0.57367575"
] | 0.0 | -1 |
'anchor' is one of the ANCHOR_ constants above. 'where' is the pixel location to slide around, and slide_direction is +/-1 to indicate which way to slide when showing. 'displacement' is where along the non-anchored axis the box should be. | def initialize(anchor, where, slide_direction, disp = 0)
super()
@rl = Opal::ResourceLocator.instance
@bgcolor = Rubygame::Color[:blue]
@border = Rubygame::Color[:white]
@anchor = anchor
@where = where
@slide_direction = slide_direction
@displacement = disp
@slide_offset = 0
@speed = 120
setup_gui
hide
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def showing?\n return true if @state == STATE_DISPLAYED\n ext = extent\n case(@anchor)\n when ANCHOR_TOP\n return @slide_offset >= 0\n when ANCHOR_LEFT\n return @slide_offset >= 0\n when ANCHOR_BOTTOM\n return @slide_offset <= -ext\n when ANCHOR_RIGHT\n return @slide_offset <= -ext\n end\n end",
"def anchor_rect anchor_x, anchor_y\n current_w = self.w\n current_h = self.h\n delta_x = -1 * (anchor_x * current_w)\n delta_y = -1 * (anchor_y * current_h)\n self.shift_rect(delta_x, delta_y)\n end",
"def anchor(name)\n mark_anchor(name).left(lambda {\n s=scene.target[self.index]\n case self.name\n when 'bottom' then s.left;\n when 'top' then s.left;\n when 'center' then s.left;\n when 'left' then nil;\n else\n s.left+s.shape_radius\n end\n }).right(lambda {\n s=scene.target[self.index]\n self.name()=='left' ? s.right+s.shape_radius : nil\n }).top(lambda {\n s=scene.target[self.index]\n case self.name\n when 'left' then s.top;\n when 'right' then s.top;\n when 'center' then s.top;\n when 'top' then nil;\n else\n s.top+s.shape_radius\n end\n }).bottom(lambda {\n s=scene.target[self.index]\n self.name()=='top' ? s.bottom+s.shape_radius : nil\n }).text_align(lambda {\n case self.name\n when 'left' then 'right';\n when 'bottom' then 'center';\n when 'top' then 'center';\n when 'center' then 'center';\n else\n 'left'\n end\n }).text_baseline( lambda {\n case self.name\n when 'right' then 'middle';\n when 'left' then 'middle';\n when 'center' then 'middle';\n when 'bottom' then 'top';\n else\n 'bottom'\n end\n \n })\n end",
"def setup_gui\n x = x_for_anchor(@anchor).to_i\n y = y_for_anchor(@anchor).to_i\n translate_to(x,y)\n end",
"def slide(x, y, dur, jump, height = 0)\n slide_x = self.x + x\n slide_y = self.y + y\n goto(slide_x, slide_y, dur, jump, height) unless moving?\n end",
"def slide(x, y, dur, jump, height = 0)\n slide_x = self.x + x\n slide_y = self.y + y\n goto(slide_x, slide_y, dur, jump, height) unless moving?\n end",
"def hide\n @state = STATE_HIDDEN\n \n if @anchor == ANCHOR_TOP || @anchor == ANCHOR_LEFT\n @slide_offset = extent * -@slide_direction - 1\n else\n @slide_offset = 0\n end\n setup_gui\n end",
"def slide(dir)\n @row, @col = slide_pos(dir)\n make_king\n nil\n end",
"def setup_slide\n return TSBS.error(@acts[0], 4, @used_sequence) if @acts.size < 5\n stop_all_movements\n xpos = (flip && !@ignore_flip_point ? -@acts[1] : @acts[1])\n ypos = @acts[2]\n slide(xpos, ypos, @acts[3], @acts[4], @acts[5] || 0)\n end",
"def anchor( val )\n case val\n when :c; @anchor = :middle\n when :l; @anchor = :start\n when :r; @anchor = :end\n else @anchor = :start\n end\n self\n end",
"def hidden?\n return true if @state == STATE_HIDDEN\n ext = extent\n case(@anchor)\n when ANCHOR_TOP\n return @slide_offset <= -ext\n when ANCHOR_LEFT\n return @slide_offset <= -ext\n when ANCHOR_BOTTOM\n return @slide_offset >= 0\n when ANCHOR_RIGHT\n return @slide_offset >= 0\n end\n end",
"def update_placement\n self.x = (Graphics.width - self.width) / 2\n self.y = (Graphics.height - self.height) / 2\n end",
"def slide_from(direction, options={}, more_options={}, &after)\n if options.is_a? Numeric\n size = options\n options = more_options\n else\n size = options[:size]\n end\n\n options[:from_current] = false unless options.key?(:from_current)\n window_size = UIApplication.sharedApplication.windows[0].frame.size\n\n case direction\n when :left\n size ||= window_size.width\n self.center = CGPoint.new(self.center.x - size, self.center.y)\n self.delta_to([size, 0], options, &after)\n when :right\n size ||= window_size.width\n self.center = CGPoint.new(self.center.x + size, self.center.y)\n self.delta_to([-size, 0], options, &after)\n when :top, :up\n size ||= window_size.height\n self.center = CGPoint.new(self.center.x, self.center.y - size)\n self.delta_to([0, size], options, &after)\n when :bottom, :down\n size ||= window_size.height\n self.center = CGPoint.new(self.center.x, self.center.y + size)\n self.delta_to([0, -size], options, &after)\n else\n raise \"Unknown direction #{direction.inspect}\"\n end\n return self\n end",
"def anchor=(value)\n @anchor = value\n end",
"def setup_target_slide\n return TSBS.error(@acts[0], 4, @used_sequence) if @acts.size < 5\n args = [@acts[1], @acts[2], @acts[3], @acts[4], @acts[5] || 0]\n if area_flag\n target_array.each do |target|\n target.slide(*args)\n end\n return\n end\n target.slide(*args)\n end",
"def setup_slide\n return unless PONY::ERRNO.check_sequence(current_act)\n stop_all_movements\n xpos = (flip && !@ignore_flip_point ? -@acts[1] : @acts[1])\n ypos = @acts[2]\n slide(xpos, ypos, @acts[3], @acts[4], @acts[5] || 0)\n @acts[4].times do\n method_wait\n end\n end",
"def slide(direction, ensured_position)\n sibling = get_sibling(direction)\n\n if sibling\n slide_to_sibling(direction, sibling, ensured_position)\n else\n push(direction == :increase ? :highest : :lowest)\n end\n end",
"def anchor_from(my_point, parent, parent_point, x_offset, y_offset)\n my_point = alignment_data_transformer(my_point)\n parent_point = alignment_data_transformer(parent_point)\n parent = $last_ui if parent == :previous\n set_layout_data com.ardor3d.extension.ui.layout.AnchorLayoutData.new(my_point, parent, parent_point, x_offset, y_offset)\n end",
"def placement x, y, face\n @pos_x = x\n @pos_y = y\n @face = face\n self\n end",
"def next_position\n return unless placed?\n axis = case direction\n when 'east', 'west'; :x\n when 'north', 'south'; :y\n end\n amount = case direction\n when 'north', 'east'; +1\n when 'south', 'west'; -1\n end\n [@x + (axis == :x ? amount : 0), @y + (axis == :y ? amount : 0)]\n end",
"def move_pointer(dy, make_space = false)\n @y -= dy\n if @y < @bottom_margin\n start_new_page\n @y -= dy if make_space\n elsif @y > absolute_top_margin\n @y = absolute_top_margin\n end\n end",
"def place(transition)\n newx, newy, newdir = transition.args\n @posx = newx.to_i\n @posy = newy.to_i\n @dir = newdir\n activate\n end",
"def pan_view( x = 1, do_display = DO_DISPLAY )\n old_left_column = @left_column\n @left_column = [ @left_column + x, 0 ].max\n record_mark_start_and_end\n display if do_display\n @left_column - old_left_column\n end",
"def setup_target_slide\n return unless PONY::ERRNO::check_sequence(current_act)\n args = [@acts[1], @acts[2], @acts[3], @acts[4], @acts[5] || 0]\n current_action_targets.each do |target|\n target.slide(*args)\n end\n end",
"def move(during, on, start_x, start_y, end_x, end_y, distortion: :UNICITY_DISTORTION,\n time_source: :GENERIC_TIME_SOURCE)\n Dim2Animation.new(during, on, :set_position, start_x, start_y, end_x, end_y,\n distortion: distortion, time_source: time_source)\n end",
"def update_arrows(current_pages)\n @dx = 0\n self.ox = 0\n offset = standard_padding * 4\n @cw = @ori_contents_width\n lv = true if @page_index > 0\n rv = current_pages[(@page_index + 1) * line_max].nil? ? false : true\n if lv # show left arrow\n @cw += offset\n @dx = offset\n self.ox = offset\n end\n @cw += offset if rv # show right arrow\n create_contents\n end",
"def move\n check_placed\n new_position = case facing\n when :north then @position.inc_y\n when :south then @position.dec_y\n when :east then @position.inc_x\n when :west then @position.dec_y\n end\n check_position(new_position)\n @position = new_position\n end",
"def clicked(position:)\n d = position.dist(@location)\n return unless d < @mass\n\n @dragging = true\n @drag_offset = @location - position\n end",
"def addPositioned(w, left, top)\n # In order to avoid the potential for a flicker effect, it is necessary\n # to set the position of the widget before adding it to the AbsolutePanel.\n # The Widget should be removed from its parent before any positional\n # changes are made to prevent flickering.\n w.removeFromParent\n DOM.setAbsolutePixelPosition(w.getElement, left, top)\n add(w)\n end",
"def shift(during, on, start_x, start_y, end_x, end_y, distortion: :UNICITY_DISTORTION,\n time_source: :GENERIC_TIME_SOURCE)\n Dim2Animation.new(during, on, :set_origin, start_x, start_y, end_x, end_y,\n distortion: distortion, time_source: time_source)\n end",
"def move_discreet(during, on, start_x, start_y, end_x, end_y, distortion: :UNICITY_DISTORTION,\n time_source: :GENERIC_TIME_SOURCE)\n Dim2AnimationDiscreet.new(during, on, :set_position, start_x, start_y, end_x, end_y,\n distortion: distortion, time_source: time_source)\n end",
"def clicked(position:)\n d = position.dist(location)\n return unless d < mass\n\n @dragging = true\n @drag_offset = location - position\n end",
"def start_at(x = 0, y = 0)\n @graphic_frame.anchor.start_at(x, y)\n end",
"def clicked(position:)\n d = position.dist(location)\n return unless d < @mass\n\n @dragging = true\n @drag_offset = location - position\n end",
"def extend_anchor(anchor, orientation, length)\n positions = [anchor]\n (length - 1).times do\n next_position = get_next_position(positions[-1], orientation)\n positions.push(next_position)\n end\n positions\n end",
"def initialize\n @x\n @y\n @facing\n @placed = false\n end",
"def classicAnchor\n @classicAnchor.bool\n end",
"def orbit_slideshow_frame(slide, position=0)\n link_options = {}\n if slide.is_a?(Artist)\n artist = slide\n artist_brand = artist.artist_brands.where(brand_id: website.brand_id).first\n\n slide_content = link_to(artist) do\n image_tag(artist.artist_photo.url(:feature), alt: artist.name, lazy: false) +\n content_tag(:div, class:\"orbit-caption\") do\n content_tag(:h2, artist.name) +\n content_tag(:p, artist_brand.intro.to_s.html_safe)\n end\n end\n elsif slide.string_value.to_s.match(/^((?:https?:)?\\/\\/)?((?:www|m)\\.)?((?:youtube\\.com|youtu.be))(\\/(?:[\\w\\-]+\\?v=|embed\\/|v\\/)?)([\\w\\-]+)(\\S+)?$/i)\n video_id = $5\n\n slide_content = link_to(play_video_url(video_id), target: \"_blank\", class: \"start-video\", data: { videoid: video_id } ) do\n image_tag(slide.slide.url, lazy: false)\n end\n else\n slide_link = (slide.string_value =~ /^\\// || slide.string_value =~ /^http/i) ? slide.string_value : \"/#{params[:locale]}/#{slide.string_value}\"\n\n if slide.string_value.to_s.match(/http/i)\n unless slide.string_value.to_s.match(/#{website.url}/i)\n link_options[:target] = \"_blank\"\n end\n end\n slide_innards = image_tag(slide.slide.url, alt: slide.name, lazy: false)\n if slide.text_value.present?\n slide_innards += content_tag(:div, class: \"homepage-orbit-caption orbit-caption\") do\n content_tag(:div, slide.text_value.html_safe, class: \"caption-content\")\n end\n end\n\n slide_content = (slide.string_value.blank?) ?\n slide_innards :\n link_to(slide_innards, slide_link, link_options)\n end\n\n # We may want to use the built-in captions\n # slide_content += content_tag(:div, \"caption content\", class: \"orbit-caption\")\n content_tag(:li, slide_content)\n end",
"def pan(direction = :left, distance_bp = (self.length_bp.to_f/5).round)\n upstream_slice = self.class.sketch.slices.select{|s| s.start_cumulative_bp < @start_cumulative_bp}.sort_by{|s| s.start_cumulative_bp}[-1]\n downstream_slice = self.class.sketch.slices.select{|s| s.start_cumulative_bp > @start_cumulative_bp}.sort_by{|s| s.stop_cumulative_bp}[0]\n\n #Check if we actually _can_ pan. Can't do that if the slice in the panned\n #direction has less basepairs than what we want to add to our slice in focus\n if ( direction == :left and upstream_slice.length_bp > distance_bp ) or\n ( direction == :right and downstream_slice.length_bp > distance_bp )\n\n #Just so we can always add the distance_pixel\n if direction == :left\n distance_bp = -distance_bp\n end\n\n @start_cumulative_bp += distance_bp\n @stop_cumulative_bp += distance_bp\n self.fetch_sequence\n\n upstream_slice.stop_cumulative_bp = @start_cumulative_bp - 1\n downstream_slice.start_cumulative_bp = @stop_cumulative_bp + 1\n [upstream_slice, downstream_slice].each do |s|\n s.length_bp = s.stop_cumulative_bp - s.start_cumulative_bp + 1\n s.resolution = s.length_pixel.to_f/s.length_bp\n s.range_cumulative_bp = Range.new(s.start_cumulative_bp, s.stop_cumulative_bp)\n s.fetch_sequence\n end\n self.class.sketch.slices.each{|s| s.format_resolution}\n\n self.class.sketch.buffer_images[:zoomed] = self.class.sketch.draw_zoomed_buffer\n self.class.sketch.buffer_images[:information_panel] = self.class.sketch.draw_information_panel\n end\n end",
"def arrowMove1(c,x,y)\n v = $demo_arrowInfo\n newA = (v.x2+5-c.canvasx(x).round)/10\n newA = 0 if newA < 0\n newA = 25 if newA > 25\n if newA != v.a\n c.move('box1', 10*(v.a-newA), 0)\n v.a = newA\n end\nend",
"def calculate_position\n x = default_horizontal_margin\n case current_position\n when :top\n y = default_vertical_margin\n when :middle\n y = (viewport.rect.height - height) / 2\n when :bottom, :left\n y = viewport.rect.height - default_vertical_margin - height\n when :right\n y = viewport.rect.height - default_vertical_margin - height\n x = viewport.rect.height - x - width\n end\n set_position(x, y)\n end",
"def setup_smooth_slide\n tx = @acts[1] + x || 0\n ty = @acts[2] + y || 0\n dur = @acts[3] || 25\n rev = @acts[4]\n rev = true if rev.nil?\n smooth_move(tx,ty,dur,rev)\n end",
"def setup_smooth_slide\n tx = @acts[1] + x || 0\n ty = @acts[2] + y || 0\n dur = @acts[3] || 25\n rev = @acts[4]\n rev = true if rev.nil?\n smooth_move(tx,ty,dur,rev)\n end",
"def target(sprite)\n return if !sprite || !sprite.is_a?(Sprite)\n self.render(Rect.new(0, 0, sprite.width, sprite.height))\n self.anchor = sprite\n end",
"def set_anchor\n @anchor = Anchor.find(params[:id])\n end",
"def ref(x, y, _width, _height)\n translate(x, y) if x != 0 || y != 0\n end",
"def insert_anchor(label)\n @codegen.insert_anchor(label)\n end",
"def update_anchor_create_loc(_bib, eref, docid)\n ins = eref.at(ns(\"./localityStack\")) or return\n type = ins.at(ns(\"./locality/@type\"))&.text\n type = \"clause\" if type == \"annex\"\n ref = ins.at(ns(\"./locality/referenceFrom\"))&.text\n #anchor = @files[docid][:anchors].dig(type, ref) or return\n anchor = @files.get(docid,:anchors).dig(type, ref) or return\n ins << \"<locality type='anchor'><referenceFrom>#{anchor.sub(/^_/, '')}\" \\\n \"</referenceFrom></locality>\"\n end",
"def start_at(x, y = nil)\n @anchor.start_at x, y\n @anchor.from\n end",
"def offset(x, y)\n @left += x\n @top += y\n @right += x\n @bottom += y\n end",
"def partial_slide(step)\n [start_pos[0] + move_dir[0] * step, start_pos[1] + move_dir[1] * step]\n end",
"def reposition_window\n config = MARW_CONFIGURATION\n self.x = config[:window_x] == -1 ? (Graphics.width - window_width) / 2 : config[:window_x]\n self.y = 0\n end",
"def move(xplace, yplace, relative, refresh_flag)\n windows = [@win, @shadow_win]\n subwidgets = [@entry_field, @scroll_field]\n self.move_specific(xplace, yplace, relative, refresh_flag,\n windows, subwidgets)\n end",
"def use_one_cell_anchor\n return if @anchor.is_a?(OneCellAnchor)\n\n new_anchor = OneCellAnchor.new(@anchor.drawing, start_at: [@anchor.from.col, @anchor.from.row])\n swap_anchor(new_anchor)\n end",
"def anchor\n \"p#{id}\"\n end",
"def move\r\n @x += @x_direction\r\n @y += @y_direction\r\n # Preventing the Alien moving out of the screen\r\n if @x > (SCREEN_WIDTH - GAME_PRESET[\"alien_reach\"]) || @x < 0\r\n @x_direction= -@x_direction\r\n elsif @y > (SCREEN_HEIGHT * @height_limit)\r\n @y_direction = 0\r\n end\r\n end",
"def start_at(x, y=0)\n x, y = *parse_coord_args(x, y)\n @graphic_frame.anchor.from.col = x\n @graphic_frame.anchor.from.row = y\n end",
"def set_display_pos(x, y)\n @display_x = (x + @map.width * 256) % (@map.width * 256)\n @display_y = (y + @map.height * 256) % (@map.height * 256)\n @parallax_x = x\n @parallax_y = y\n end",
"def place_at(x, y, direction)\n @position = [x.to_i, y.to_i]\n @front = case direction.downcase \n when \"north\" then :north\n when \"south\" then :east\n when \"east\" then :south\n when \"west\" then :west\n end\n puts \"position is: #{@position} direction is #{@front}\"\n end",
"def swap_anchor(new_anchor)\n new_anchor.drawing.anchors.delete(new_anchor)\n @anchor.drawing.anchors[@anchor.drawing.anchors.index(@anchor)] = new_anchor\n new_anchor.instance_variable_set :@object, @anchor.object\n @anchor = new_anchor\n end",
"def arrowMove3(c,x,y)\n v = $demo_arrowInfo\n newWidth = (v.y+2-c.canvasy(y).round)/5\n newWidth = 0 if newWidth < 0\n newWidth = 20 if newWidth > 20\n if newWidth != v.width\n c.move('box3', 0, 5*(v.width-newWidth))\n v.width = newWidth\n end\nend",
"def add(box, doLayout = true)\n # add the box\n @boxes << box\n box.inPosition = false\n box.parent = self\n \n if doLayout\n findSpot(box)\n end\n end",
"def moves\n # overridden in slideable/stepable modules\n end",
"def pan_view( x = 1, do_display = DO_DISPLAY )\n old_left_column = @left_column\n pan_view_to( @left_column + x, do_display )\n @left_column - old_left_column\n end",
"def initialize(anchor, options = {})\n @anchor = anchor\n @hyperlink = nil\n @anchor.drawing.worksheet.workbook.images << self\n @remote = options[:remote]\n parse_options options\n start_at(*options[:start_at]) if options[:start_at]\n yield self if block_given?\n @picture_locking = PictureLocking.new(options)\n @opacity = (options[:opacity] * 100000).round if options[:opacity]\n end",
"def move(xplace, yplace, relative, refresh_flag)\n windows = [@win, @label_win, @field_win, @shadow_win]\n self.move_specific(xplace, yplace, relative, refresh_flag,\n windows, [])\n end",
"def place(x, y, facing)\n tentative_postion = Position.new(x, y)\n check_position(tentative_postion)\n @position = tentative_postion\n @facing = facing\n end",
"def place_object\n if @places_window.nil?\n hide_object\n return\n end\n place = @places_window.item\n if place.type == 0\n hide_object\n return\n end\n visible_before = @location.opacity == 255\n @location.opacity = 255\n if visible_before\n @location.smooth_move(adx(place.map_x), ady(place.map_y))\n else\n @location.x = adx(place.map_x)\n @location.y = ady(place.map_y)\n end\n @circlel.x = adx(place.map_x)\n @circlel.y = ady(place.map_y)\n end",
"def update_position\n # Calculate values\n target_pos = @direction.object_origin(@target) - @min # Retrieve target ox / oy\n bar_size = @background_sprite.height - @button_slider_sprite.height\n base = @direction.object_position(@background_sprite)\n value = base + (target_pos / (@max - @min)) * bar_size.to_f\n value = base if value.nan? || value < base\n value = base + bar_size if value > base + bar_size\n # Set the slider position\n @direction.set_object_position @button_slider_sprite, value\n end",
"def move(xplace, yplace, relative, refresh_flag)\n windows = [@win, @field_win, @label_win, @shadow_win]\n self.move_specific(xplace, yplace, relative, refresh_flag,\n windows, [])\n end",
"def use_two_cell_anchor\n return if @anchor.is_a?(TwoCellAnchor)\n\n new_anchor = TwoCellAnchor.new(@anchor.drawing, start_at: [@anchor.from.col, @anchor.from.row])\n swap_anchor(new_anchor)\n end",
"def place position\n x,y = position\n\n leftmost = most(position, Left)\n rightmost= most(position, Right)\n topmost= most(position, Top)\n bottommost= most(position, Bottom)\n\n (leftmost[0]..rightmost[0]).each {|x| @horizontal.delete [x,y]}\n (topmost[1]..bottommost[1]).each {|y| @vertical.delete [x,y]}\n\n @horizontal[leftmost] = rightmost[0]\n @vertical[topmost] = bottommost[1] \n end",
"def adjust_for_tip\n move\n turn_right\n move\n turn_around\n end",
"def position_changing(position , to)\n end",
"def mouse_pan(location)\n\n\t\tpan_x = (@dolly_pan_start_point[0] - location.x) / (900.0 / -@camera.view_position.z)\n\t\tpan_y = (@dolly_pan_start_point[1] - location.y) / (900.0 / -@camera.view_position.z)\n\n\t\t@camera.view_position.x -= pan_x\n\t\t@camera.view_position.y -= pan_y\n\n\t\t@dolly_pan_start_point\t= [location.x, location.y]\n\n\t\tupdate_camera_string\n\n\tend",
"def show\n @sel.visible = false\n @typeInd.visible = false\n @background.y -= (@background.bitmap.height/8)\n for i in 0...@nummoves\n @button[\"#{i}\"].x += ((i%2 == 0 ? 1 : -1)*@viewport.width/16)\n end\n end",
"def moving?; @moving; end",
"def moving?; @moving; end",
"def move(direction)\n \n end",
"def display_position\n # Fill this in\n end",
"def display_position\n # Fill this in\n end",
"def place_through(x, y, width_bound, height_bound, image, target)\n new_x = (width_bound - image.columns) / 2 + x\n new_y = (height_bound - image.rows) / 2 + y\n place_onto(new_x, new_y, image, target)\n end",
"def anchor?\n @value == AnchorValue\n end",
"def generate_move_from_click(x, y)\n [:spawn, {:x => x, :y => y}]\n end",
"def scroll_left(distance)\n if loop_horizontal?\n @display_x += @map.width * 256 - distance\n @display_x %= @map.width * 256\n @parallax_x -= distance\n else\n last_x = @display_x\n @display_x = [@display_x - distance, 0].max\n @parallax_x += @display_x - last_x\n end\n end",
"def move\n @x = (@x + @x_velocity) % Window.width\n @y = (@y + @y_velocity) % Window.height\nend",
"def arrowMove2(c,x,y)\n v = $demo_arrowInfo\n newB = (v.x2+5-c.canvasx(x).round)/10\n newB = 0 if newB < 0\n newB = 25 if newB > 25\n newC = (v.y+5-c.canvasy(y).round-5*v.width)/10\n newC = 0 if newC < 0\n newC = 20 if newC > 20\n if newB != v.b || newC != v.c\n c.move('box2', 10*(v.b-newB), 10*(v.c-newC))\n v.b = newB\n v.c = newC\n end\nend",
"def move_to(x, y); end",
"def refresh_window_alignment\n self.x = case @@alignment\n when 0 then 0\n when 1 then Graphics.width/2-(width/2)\n when 2 then Graphics.width-width\n end\n end",
"def move_absolute( coord_x, coord_y, coord_z)\n\n # calculate the number of steps for the motors to do\n\n $status.info_target_x = coord_x\n $status.info_target_y = coord_y\n $status.info_target_z = coord_z\n\n steps_x = coord_x * @axis_x_steps_per_unit\n steps_y = coord_y * @axis_y_steps_per_unit\n steps_z = coord_z * @axis_z_steps_per_unit\n\n @axis_x_pos = steps_x\n @axis_y_pos = steps_y\n @axis_z_pos = steps_z\n\n move_to_coord(steps_x, steps_y, steps_z )\n\n end",
"def place dw\n (@w*@h).times{|startpt|\n return if test_place(dw, startpt)\n }\n end",
"def google_slide_show(feed, \n content_id = 'slide_show_content',\n options = { :displayTime => 2000, :transistionTime => 600, :scaleImages => true, :fullControlPanel => true })\n render :partial => 'google/slide_show', :locals => { :feed => feed, :content_id => content_id, :options => options }\n end",
"def setCenter(center)\n # center.x -> bounds.size.width, center.y -> bounds.size.height\n @target.bounds = CGRectMake(0, 0, center.x, center.y)\n end",
"def place(x, y, direction)\n # Set target position. Cancel processing the method if it's outside current surface\n target_position = NavigationEntities::Position.new(x, y)\n return cancel_action(\"Robot can't be placed outside\") unless surface.position_on_surface?(target_position)\n\n # Set direction of the robot. Store it in the placement object.\n target_direction = NavigationEntities::Direction.new(direction)\n self.target_placement = NavigationEntities::Placement.new(target_position, target_direction)\n\n # Update information about current location.\n update_placement(target_placement)\n end",
"def place(x, y, f)\n return unless valid_position?(x, y)\n set_position(x, y)\n set_direction(f.downcase.to_sym)\n end",
"def location(x, y, w, h)\n return x - w/2, y + 20\n end",
"def layout\n for box in @boxes\n if !box.inPosition\n findSpot(box)\n end\n end\n end",
"def move(direction, map)\n x = @x_location\n y = @y_location\n \n case direction\n when :north then y -= 1\n when :south then y += 1 \n when :west then x -= 1\n when :east then x += 1 \n end\n \n unless map[y][x].is_a_wall?\n @x_location = x\n @y_location = y\n end \n end",
"def emit(dia, xmax, ymax, height)\n ehead xmax, ymax, height\n\n dia.each_pair do |pod_name, actions|\n actions.each do |el|\n title = \"#{pod_name}: #{el.state} started @#{el.start} for #{el.width}s\"\n ebox text: el.state, x_pos: el.x, y_pos: el.y,\n width: el.width, height: el.height, title: title\n end\n etext pod_name, actions.last.x + actions.last.width + 2, actions.last.y + 2\n end\n etail\nend",
"def mouse_dolly(location)\n\n\t\tdolly = (@dolly_pan_start_point[1] - location.y) * -@camera.view_position.z / 300.0\n\t\t@camera.view_position.z += dolly\n\n\t\tif @camera.view_position.z == 0 then @camera.view_position.z = 0.0001 end\n\n\t\t@dolly_pan_start_point\t= [location.x, location.y]\n\n\t\tupdate_camera_string\n\n\tend"
] | [
"0.56952196",
"0.5655555",
"0.5618236",
"0.537118",
"0.53434837",
"0.53434837",
"0.5302156",
"0.5202816",
"0.51520866",
"0.5117545",
"0.5083145",
"0.5079932",
"0.507465",
"0.49990264",
"0.4939094",
"0.4862796",
"0.48468092",
"0.47734866",
"0.47325787",
"0.46962148",
"0.46813878",
"0.46381837",
"0.4618923",
"0.46051067",
"0.45953313",
"0.45826942",
"0.45334998",
"0.45288694",
"0.45276263",
"0.45234632",
"0.45180947",
"0.45091164",
"0.4505502",
"0.44978666",
"0.4488604",
"0.44799703",
"0.44741964",
"0.44666818",
"0.44648293",
"0.44640112",
"0.44618547",
"0.44614515",
"0.44614515",
"0.4447756",
"0.44376075",
"0.44322377",
"0.4432057",
"0.44160852",
"0.43975022",
"0.43972895",
"0.43808407",
"0.43754584",
"0.4365383",
"0.4361254",
"0.43569005",
"0.4354419",
"0.4354311",
"0.43474343",
"0.43399253",
"0.4337031",
"0.4335262",
"0.4332744",
"0.433156",
"0.43287876",
"0.4327902",
"0.43271342",
"0.4324668",
"0.43227643",
"0.43211025",
"0.43185252",
"0.43020144",
"0.4301013",
"0.42943466",
"0.4292711",
"0.42878458",
"0.4286601",
"0.42777503",
"0.42777503",
"0.4269466",
"0.426043",
"0.426043",
"0.42471084",
"0.42458147",
"0.4234516",
"0.42282134",
"0.42276594",
"0.42245784",
"0.42227685",
"0.42200753",
"0.42174476",
"0.42173225",
"0.42000422",
"0.41982588",
"0.4194996",
"0.41889882",
"0.41750008",
"0.41748014",
"0.41710114",
"0.41701487",
"0.4169737"
] | 0.60322 | 0 |
How far along the slide this has to go to be fully extended | def extent
if @anchor == ANCHOR_TOP || @anchor == ANCHOR_BOTTOM
return @rect.h
else
return @rect.w
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def slide_effect(duration, height)\n if @temp_basic_step_height == nil\n @temp_basic_step_height = height / duration.to_f\n \n @we_open_start_height = self.height\n end\n\n step_height = height - (duration * @temp_basic_step_height)\n \n self.height = @we_open_start_height + step_height\n\n if duration == 0\n @temp_basic_step_height = nil\n @we_open_start_height = nil\n end\n end",
"def rel_y\n return y - Graphics.height/2\n end",
"def rel_y\n return y - Graphics.height/2\n end",
"def rel_y\n return y - Graphics.height/2\n end",
"def relative(x); x.to_f/@game.height; end",
"def setup_smooth_slide\n tx = @acts[1] + x || 0\n ty = @acts[2] + y || 0\n dur = @acts[3] || 25\n rev = @acts[4]\n rev = true if rev.nil?\n smooth_move(tx,ty,dur,rev)\n end",
"def setup_smooth_slide\n tx = @acts[1] + x || 0\n ty = @acts[2] + y || 0\n dur = @acts[3] || 25\n rev = @acts[4]\n rev = true if rev.nil?\n smooth_move(tx,ty,dur,rev)\n end",
"def bottom!\n self.ox = self.width/2\n self.oy = self.height\n end",
"def moves\n # overridden in slideable/stepable modules\n end",
"def rel_x\n return x - Graphics.width/2\n end",
"def rel_x\n return x - Graphics.width/2\n end",
"def inertia\n 150\n end",
"def partial_slide(step)\n [start_pos[0] + move_dir[0] * step, start_pos[1] + move_dir[1] * step]\n end",
"def adjust(relative_position)\n current.position += relative_position\n\n self\n end",
"def bottom\n self.ox = self.src_rect.width/2\n self.oy = self.src_rect.height\n end",
"def go_ahead(steps,width=1)\n x0 = @pos.x\n y0 = @pos.y\n x1 = (@pos.x + steps*Math.cos(@angle)).round()\n y1 = (@pos.y - steps*Math.sin(@angle)).round()\n if (@down)\n animiere(3) {\n TkcLine.new( Leinwand.gib_einzige_instanz(),x0,y0,x1,y1,:fill => :black, :width => [1,width] .max()) }\n end\n @pos = Point.new(x1,y1)\n end",
"def move_west\n @x -= 1\n end",
"def default_move_duration; end",
"def height\n @animation.height\n end",
"def relative_offset\n active_position = self.game.player.position\n if self.position >= active_position\n return (active_position - self.position) / 10\n else\n return (active_position - self.position - self.game.number_of_players) / 10\n end\n end",
"def garden_grow(points)\r\n end",
"def slideUp\n if self.height > 0\n @duration = self.animationSpeed\n @orig_height = self.height\n @closing = true\n @opening = false\n @animationType = SLIDE\n end\n end",
"def slide(dir)\n @row, @col = slide_pos(dir)\n make_king\n nil\n end",
"def setup_slide\n return TSBS.error(@acts[0], 4, @used_sequence) if @acts.size < 5\n stop_all_movements\n xpos = (flip && !@ignore_flip_point ? -@acts[1] : @acts[1])\n ypos = @acts[2]\n slide(xpos, ypos, @acts[3], @acts[4], @acts[5] || 0)\n end",
"def pile(w)\n w.drag_to(@current)\n w.set_default_refresh_loc('tl')\n @current.x += PILE_OFFSET\n end",
"def half_wind; end",
"def height\n return @window_height # outer height\nend",
"def height\n top - bottom\n end",
"def next_step\n\t\t# choose the next step based on the lowest absolute delta\n\t\tnext_step = [ delta_right_top, delta_right_mid, delta_right_bot ].min_by { |i| i[1].abs }\n\n\t\t# increase tot_elavation with the delta between current and next step\n\t\t@tot_elavation += next_step[1].abs\n\n\t\treturn next_step[0]\n\tend",
"def y_offset; end",
"def reduce\n @y += 1; @height -= 2\n update_position\n end",
"def farpoint() origin + size end",
"def adjust w\n end",
"def adjust w\n end",
"def setup_slide\n return unless PONY::ERRNO.check_sequence(current_act)\n stop_all_movements\n xpos = (flip && !@ignore_flip_point ? -@acts[1] : @acts[1])\n ypos = @acts[2]\n slide(xpos, ypos, @acts[3], @acts[4], @acts[5] || 0)\n @acts[4].times do\n method_wait\n end\n end",
"def tenth_frame; end",
"def move_down(window)\n\t\t# if the horse isn't yet at the bottom of the screen move it down 20\n\t\tif @y < window.height - @image.height\n\t\t\t@y = @y +20\n\t\tend\n\tend",
"def adjust_duration duration\n grid_increment / duration\n end",
"def visible_height\n @win.maxy - 2\n end",
"def height\n return @shadow_point.y - self.y\n end",
"def height\n return @shadow_point.y - self.y\n end",
"def conveyor_belt_y\n center_y + center_y / 2 + 20\n end",
"def adjust_for_tip\n move\n turn_right\n move\n turn_around\n end",
"def width\n @animation.width\n end",
"def width\n right - left\n end",
"def width\n right - left\n end",
"def action_jump_height\n if (@positions[:jumppeak] > 0) \n return (@positions[:jumppeak] * @positions[:jumppeak] - (@positions[:target][:time] - @positions[:jumppeak]).abs ** 2) / 2\n end\n return 0\n end",
"def height\n @y1 - @y0\n end",
"def x_adjustment\n\t\taverage_x_distance = (@left_sonar_distance + @right_sonar_distance) / 2 \n\n\t\tif average_x_distance > IDEAL_SPRAY_DISTANCE\n\t\t\t@drone.forward(DRONE_THRUST)\n\t\telsif average_x_distance < IDEAL_SPRAY_DISTANCE\n\t\t\t@drone.backward(DRONE_THRUST)\n\t\telse\n\t\t\t# do nothing if at the IDEAL_SPRAY_DISTANCE\n\t\tend\n\tend",
"def player_position_look_update; end",
"def next_y\n state.y + state.dy\n end",
"def width\n return @window_width # outer width\nend",
"def view_offset\n @position + @view.position\n end",
"def screen_y\n @page ? super + @page.adjusted_xy[1] : super\n end",
"def dy() 0 end",
"def move_east\n @x += 1\n end",
"def calculate_heel_drop\n\t\tself.heel_drop = self.heel_stack - self.forefoot_stack\n\tend",
"def position_of_mrx\n if @turns % 5 == 3\n @figures[0].position\n else\n 0\n end\n end",
"def x_offset; end",
"def home\n pen_up\n self.xy = ORIGIN\n self.heading = 0.0\n end",
"def grow \n\t\t@height += 1\n\tend",
"def next_slide()\n if @current_slide < @total_slides - 1\n @current_slide += 1\n end\n end",
"def stretches\n @stretches ||= 20\n end",
"def steps_remaining\n @total_steps - @step\n end",
"def arrowMove3(c,x,y)\n v = $demo_arrowInfo\n newWidth = (v.y+2-c.canvasy(y).round)/5\n newWidth = 0 if newWidth < 0\n newWidth = 20 if newWidth > 20\n if newWidth != v.width\n c.move('box3', 0, 5*(v.width-newWidth))\n v.width = newWidth\n end\nend",
"def start_angle\n end",
"def column_next dir=0\n if dir == 0\n $stact += $grows\n $stact = 0 if $stact >= $viewport.size\n else\n $stact -= $grows\n $stact = 0 if $stact < 0\n end\nend",
"def column_next dir=0\n if dir == 0\n $stact += $grows\n $stact = 0 if $stact >= $viewport.size\n else\n $stact -= $grows\n $stact = 0 if $stact < 0\n end\nend",
"def column_next dir=0\n if dir == 0\n $stact += $grows\n $stact = 0 if $stact >= $viewport.size\n else\n $stact -= $grows\n $stact = 0 if $stact < 0\n end\nend",
"def adjust_x(x)\n if loop_horizontal? and x < @display_x - @margin_x\n return x - @display_x + @map.width * 256\n else\n return x - @display_x\n end\n end",
"def slide(x, y, dur, jump, height = 0)\n slide_x = self.x + x\n slide_y = self.y + y\n goto(slide_x, slide_y, dur, jump, height) unless moving?\n end",
"def slide(x, y, dur, jump, height = 0)\n slide_x = self.x + x\n slide_y = self.y + y\n goto(slide_x, slide_y, dur, jump, height) unless moving?\n end",
"def pan(direction = :left, distance_bp = (self.length_bp.to_f/5).round)\n upstream_slice = self.class.sketch.slices.select{|s| s.start_cumulative_bp < @start_cumulative_bp}.sort_by{|s| s.start_cumulative_bp}[-1]\n downstream_slice = self.class.sketch.slices.select{|s| s.start_cumulative_bp > @start_cumulative_bp}.sort_by{|s| s.stop_cumulative_bp}[0]\n\n #Check if we actually _can_ pan. Can't do that if the slice in the panned\n #direction has less basepairs than what we want to add to our slice in focus\n if ( direction == :left and upstream_slice.length_bp > distance_bp ) or\n ( direction == :right and downstream_slice.length_bp > distance_bp )\n\n #Just so we can always add the distance_pixel\n if direction == :left\n distance_bp = -distance_bp\n end\n\n @start_cumulative_bp += distance_bp\n @stop_cumulative_bp += distance_bp\n self.fetch_sequence\n\n upstream_slice.stop_cumulative_bp = @start_cumulative_bp - 1\n downstream_slice.start_cumulative_bp = @stop_cumulative_bp + 1\n [upstream_slice, downstream_slice].each do |s|\n s.length_bp = s.stop_cumulative_bp - s.start_cumulative_bp + 1\n s.resolution = s.length_pixel.to_f/s.length_bp\n s.range_cumulative_bp = Range.new(s.start_cumulative_bp, s.stop_cumulative_bp)\n s.fetch_sequence\n end\n self.class.sketch.slices.each{|s| s.format_resolution}\n\n self.class.sketch.buffer_images[:zoomed] = self.class.sketch.draw_zoomed_buffer\n self.class.sketch.buffer_images[:information_panel] = self.class.sketch.draw_information_panel\n end\n end",
"def up_step\n start_step\n end",
"def full_height\n height + (margin * 2)\n end",
"def full_height\n height + (margin * 2)\n end",
"def full_height\n height + (margin * 2)\n end",
"def height\n return self.rect.height\n end",
"def right_margin\r\n left_margin + active_width - 1\r\n end",
"def bottom()\n return @top + @height\n end",
"def height\n return self.src_rect.height\n end",
"def slide_from(direction, options={}, more_options={}, &after)\n if options.is_a? Numeric\n size = options\n options = more_options\n else\n size = options[:size]\n end\n\n options[:from_current] = false unless options.key?(:from_current)\n window_size = UIApplication.sharedApplication.windows[0].frame.size\n\n case direction\n when :left\n size ||= window_size.width\n self.center = CGPoint.new(self.center.x - size, self.center.y)\n self.delta_to([size, 0], options, &after)\n when :right\n size ||= window_size.width\n self.center = CGPoint.new(self.center.x + size, self.center.y)\n self.delta_to([-size, 0], options, &after)\n when :top, :up\n size ||= window_size.height\n self.center = CGPoint.new(self.center.x, self.center.y - size)\n self.delta_to([0, size], options, &after)\n when :bottom, :down\n size ||= window_size.height\n self.center = CGPoint.new(self.center.x, self.center.y + size)\n self.delta_to([0, -size], options, &after)\n else\n raise \"Unknown direction #{direction.inspect}\"\n end\n return self\n end",
"def cent_x\n self.x + (@width / 2)\n end",
"def item_y\n contents_height / 2 - line_height * 3 / 2\n end",
"def home\n @heading = 0.0\n @xy = [0.0, 0.0]\n @pen_is_down = false\n end",
"def starting_position; end",
"def remaining_height(base_height)\n base_height.anchor[1] - bounds.anchor[1]\n end",
"def slideDown\n if @orig_height == nil || self.height < @orig_height\n @duration = self.animationSpeed\n if @orig_height == nil\n @orig_height = self.height\n end\n self.height = 0\n self.visible = true\n @opening = true\n @closing = false\n @animationType = SLIDE\n end\n end",
"def moves\n sliding_moves + jumping_moves\n end",
"def min_window\nend",
"def next_x\n if state.dx < 0 # if the player moves left\n return state.x - (state.tile_size - state.player_width) # subtracts since the change in x is negative (player is moving left)\n else\n return state.x + (state.tile_size - state.player_width) # adds since the change in x is positive (player is moving right)\n end\n end",
"def goal\n 28\n end",
"def height\n @height = @height + 1\n end",
"def rel_pos(enpt, vec)\n ratio = @enpt.prog / @enpt.points[1].pos.distance(@enpt.points[2].pos).to_m\n tangent = spline_tangent(@enpt, ratio)\n angle = Math::PI - Math.atan2(tangent.x, tangent.y)\n vec.transform!(Geom::Transformation.rotation(enpt, Z_AXIS, angle))\n # camera is focused at player's head, so we add 200 to the height\n return enpt + vec + [0, 0, 200.m]\n end",
"def tweak(origin_left, args = nil)\n @released = false\n origin_pos = @fraction\n \n self.app.motion do |lf,t|\n pos =(origin_pos - (origin_left - lf))\n if @active and not @released and @range.cover?(pos)\n self.fraction= pos\n if args\n @driven.call( *args.map { |v| v = @fraction if v == \"driver\" } )\n else\n @driven.call @fraction\n end\n end\n end\n self.app.release {|b,l,t| @released = true}\n end",
"def advance(distance = 1)\n @position.x += distance\n end",
"def step_one_points\n\t\tpoints = 0\n\t\tpoints+=50 if !link.blank?\n\t\tpoints+=25 if !title.blank?\n\t\tpoints+=25 if !company.blank?\n\t\tpoints\n\tend",
"def bottom\n @x_max\n end",
"def scroll_step\r\n @options[:scroll_step]\r\n end",
"def frames; motion.frames; end",
"def abs_length()\n case @facing\n when :north, :south; @length\n when :east, :west; @width\n end\n end"
] | [
"0.6298013",
"0.61545056",
"0.61545056",
"0.61545056",
"0.61353254",
"0.6114734",
"0.6114734",
"0.5989357",
"0.59288347",
"0.59227586",
"0.59227586",
"0.58450705",
"0.5824173",
"0.5817732",
"0.5775455",
"0.5763048",
"0.5731604",
"0.5726077",
"0.5692901",
"0.5689719",
"0.5678542",
"0.566012",
"0.564599",
"0.56249374",
"0.56107605",
"0.5589926",
"0.5570152",
"0.5556275",
"0.5549355",
"0.55491275",
"0.5548919",
"0.5506677",
"0.55010146",
"0.55010146",
"0.54832846",
"0.548032",
"0.5476919",
"0.5444363",
"0.54330933",
"0.5431438",
"0.5431438",
"0.5422119",
"0.5420341",
"0.5417248",
"0.54019135",
"0.54019135",
"0.5390291",
"0.53815806",
"0.5376722",
"0.53700644",
"0.5365661",
"0.5362601",
"0.5352401",
"0.5350187",
"0.5339681",
"0.53234494",
"0.5315109",
"0.53045326",
"0.5298664",
"0.5290216",
"0.5289385",
"0.5282771",
"0.5277679",
"0.52776325",
"0.52772266",
"0.5265948",
"0.52620363",
"0.52620363",
"0.52620363",
"0.5257142",
"0.5256162",
"0.5256162",
"0.52527696",
"0.52482903",
"0.5238441",
"0.5238441",
"0.5238441",
"0.52364486",
"0.5234792",
"0.52345407",
"0.52134943",
"0.5213295",
"0.52123505",
"0.5209454",
"0.5209026",
"0.52077675",
"0.52040505",
"0.51933354",
"0.5189354",
"0.5188381",
"0.5185329",
"0.5173986",
"0.51706004",
"0.5169885",
"0.516897",
"0.51672596",
"0.51662153",
"0.516564",
"0.5163221",
"0.5144567",
"0.5142431"
] | 0.0 | -1 |
Whether or not this is fully hidden | def hidden?
return true if @state == STATE_HIDDEN
ext = extent
case(@anchor)
when ANCHOR_TOP
return @slide_offset <= -ext
when ANCHOR_LEFT
return @slide_offset <= -ext
when ANCHOR_BOTTOM
return @slide_offset >= 0
when ANCHOR_RIGHT
return @slide_offset >= 0
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hidden?\n not visible\n end",
"def hidden?()\n not visible?()\n end",
"def is_visible?\n visibility && ( visibility > 0 )\n end",
"def is_hidden?\n\t\tself.hidden\n\tend",
"def hidden?\n false\n end",
"def hidden?\n false\n end",
"def hidden?\n false\n end",
"def visible?\n @visible\n end",
"def visible?\n @visible\n end",
"def visible?\n @style.display != 'none'\n end",
"def invisible?\n false\n end",
"def hidden?\n hidden_at?\n end",
"def hidden?\n hidden_at?\n end",
"def hidden?\n hidden_at?\n end",
"def visible?\n true\n end",
"def hidden?\n return @hidden\n end",
"def invisible?\n false\n end",
"def invisible?\n false\n end",
"def visible?\n\t\t\t@visible\n\t\tend",
"def hide?\n @hide\n end",
"def visible?\n el.displayed?\n end",
"def visible?\n end",
"def hidden?\n return @hidden\n end",
"def visible?; \n\t\t@visible = true if @visible == nil\n\t\t@visible\n\tend",
"def panel_hidden\n Panel.panel_hidden(pointer) == 0\n end",
"def hidden?\n @hidden\n end",
"def none?\n self.display_on == \"none\"\n end",
"def not_hidden?\n not hidden\n end",
"def hidden?\n end",
"def visible?\n @a.nonzero?\n end",
"def showable?\n hidden? && hidden_origin.blank?\n end",
"def showable?\n hidden? && hidden_origin.blank?\n end",
"def visible\n return @viewport.visible\n end",
"def visible?\n wd_element.displayed?\n end",
"def panel_hidden\n FFI::NCurses.panel_hidden(@pointer) == 0\n end",
"def hidden?()\n @view__.hidden?\n end",
"def hidden?(*)\n false\n end",
"def visible\n return @stack[1].visible\n end",
"def hidden?\n self.privacy == HIDDEN\n end",
"def hidden?\n @field.widget.hidden?\n end",
"def visible?\n !value.nil?\n end",
"def hidden?\n classes.include?('hidden')\n end",
"def visibility?\n @visibility || true\n end",
"def visible\n return @bar.visible\n end",
"def visibility_changed?\n !(@old_visible_value == visible)\n end",
"def in_hidden_entrance\r\n\r\n # Determine the tile placement, as integers \r\n tile_x = (@player.get_x/16).to_i\r\n tile_y = (@player.get_y/16).to_i\r\n\r\n # Hidden tile is present, so return true\r\n if @level[1][tile_y-1][tile_x] > 0 then\r\n return true\r\n\r\n # Otherwise the player is no longer hidden\r\n else\r\n return false\r\n end\r\n end",
"def hidden?\n if object.respond_to? :menu_hidden\n object.menu_hidden\n end\n end",
"def hide_elements\n true\n end",
"def visible?\n @dialog ? true : false\n end",
"def visible?() \n if @visible.nil?\n visible = parent.nil? ? true : parent.visible?\n else\n visible = @visible\n end\n visible = instance_eval &visible if visible.kind_of?(Proc)\n visible\n end",
"def visible?\n parent.nil?\n end",
"def visible?(sx,sy)\n (sx + @width / 2 > 0 && sx - @width / 2 < Common::SCREEN_X && sy + @height / 2 > 0 &&\n sy - @height / 2 < Common::SCREEN_Y)\n end",
"def visible?\n return @peer.visible\n end",
"def use_hidden_layers?\n end",
"def check_visible(obj)\n return obj.gr_is_visible?\n end",
"def visible?\n Waiter.wait_for do\n inst = presence\n !! inst && inst.visible?\n end\n end",
"def visible?(sx,sy)\n (sx + @width > 0 && sx < Common::SCREEN_X && sy + @height > 0 &&\n sy < Common::SCREEN_Y)\n end",
"def visible\n @ole.Visible\n end",
"def visible\n @ole.Visible\n end",
"def visible\n @ole.Visible\n end",
"def visible\n @ole.Visible\n end",
"def refresh_view?\n visible? && (ox >= bordered_width || oy >= bordered_height)\n end",
"def hidden?; self[:hidden] ||= false; end",
"def hide!\n visible(false)\n end",
"def hide!\n visible(false)\n end",
"def wrapper_visibility\n @field_config[:hide_on_load] = !@field_config[:initially_visible] if @field_config&.key?(:initially_visible)\n return '' unless @field_config[:hide_on_load]\n\n ' hidden'\n end",
"def is_showable\n return true unless self.is_hidden or self.is_deleted or self.status == \"tmp\"\n end",
"def hidden!\n @hidden = true\n self\n end",
"def visible? component\n r, c = component.rowcol\n return false if c+@cols_panned < @orig_left\n return false if c+@cols_panned > @orig_left + @display_w\n # XXX TODO for rows UNTESTED for rows\n return false if r + @rows_panned < @orig_top\n return false if r + @rows_panned > @orig_top + @display_h\n\n return true\n end",
"def hidden?\n hooked? && currently_defined?\n end",
"def visible? component\n r, c = component.rowcol\n return false if c+@cols_panned < @orig_left\n return false if c+@cols_panned > @orig_left + @display_w\n # XXX TODO for rows UNTESTED for rows\n return false if r + @rows_panned < @orig_top\n return false if r + @rows_panned > @orig_top + @display_h - 2\n\n return true\n end",
"def show(&block)\n visible block || true\n end",
"def is_visible?\n setup.show_all? || Date.today >= start_at\n end",
"def hide\n @visible = false\n self\n end",
"def hidden?\n enabled = (@hidden_feature_set || []).any? do |feature|\n Feature.enabled?(feature)\n end\n @feature_hidden && !enabled\n end",
"def isVisible\n DOM.isVisible(@element)\n end",
"def visible?\n assert_exists\n @element.displayed?\n end",
"def visible?\n assert_exists\n @element.displayed?\n end",
"def hidden?\n qattrs[:hidden] == '1'\n end",
"def victory?\n self.grid.flatten.select {|space| !space.mine }.\n all? {|space| space.visible }\n end",
"def universal_visibility?\n self.visibility == :universal\n end",
"def toggle\n if visible?\n hide\n\n else\n show\n\n end\n end",
"def visible?\n !(self.created? || self.suspended? || self.deleted?)\n end",
"def visible?\n !(self.created? || self.suspended? || self.deleted?)\n end",
"def visible?\n !(self.created? || self.suspended? || self.deleted?)\n end",
"def hide(&block)\n visible block ? proc { false == (instance_eval &block) } : false\n end",
"def visible?\n if ta_visible || peer_visible\n true\n else\n errors.add(:base, I18n.t('activerecord.errors.models.criterion.visibility_error'))\n false\n end\n end",
"def forFriendfunctions\n @status = false\n self.hide\n showElements\n end",
"def visa_check?\n schengen_overstay? == false && visa_overstay? == false\n end",
"def isHidden _args\n \"isHidden _args;\" \n end",
"def hide\n self.visible = false\n clear_dmg_preview\n end",
"def invisible?\n resolved_rules.members.all? {|seq| seq.invisible?}\n end",
"def hidden?\n return false if parent.tag == :root\n return true if parent.open == false\n parent.hidden?\n end",
"def cvv_help_displayed?\n cvv_help_div.visible?\n end",
"def has_images?\n images.length > 0 && hidden == false\n end",
"def hideSection(frameH, hideB, label, binB, isHidden)\n if !isHidden\n frameH.backColor = FXRGB(176, 176, 176)\n frameH.borderColor = FXRGB(106, 106, 106)\n label.backColor = FXRGB(176, 176, 176)\n label.textColor = FXRGB(56, 56, 56)\n binB.backColor = FXRGB(176, 176, 176)\n hideB.backColor = FXRGB(176, 176, 176)\n isHidden = true\n else\n frameH.backColor = FXRGB(212, 208, 200)\n frameH.borderColor = FXRGB(0, 0, 0)\n label.backColor = FXRGB(212, 208, 200)\n label.textColor = FXRGB(0, 0, 0)\n binB.backColor = FXRGB(212, 208, 200)\n hideB.backColor = FXRGB(212, 208, 200)\n isHidden = false\n end\n\n return isHidden\n end",
"def visible(value = true)\n boolean = value ? true : false\n\n model.visible = boolean\n end",
"def hideable?\n @hideable ||= [\"Task deferred\", \"Project deferred\", \"Waiting on\", \"Hanging\"].include?(status)\n end",
"def show; @showing = false; end",
"def init_visibility\n return if actor? && !@battler.data_battler.dead_key.empty?\n @battler_visible = !@battler.hidden? && (@battler.enemy? ? \n !@battler.collapsed : true)\n self.opacity = 0 unless @battler_visible\n end"
] | [
"0.82311636",
"0.8133142",
"0.8063336",
"0.79033816",
"0.78247166",
"0.7776644",
"0.7776644",
"0.77705187",
"0.77705187",
"0.77097124",
"0.76978064",
"0.7676774",
"0.7676774",
"0.7676774",
"0.76727",
"0.7670146",
"0.7657416",
"0.7657416",
"0.761389",
"0.7594288",
"0.75856316",
"0.7571048",
"0.75665885",
"0.75515074",
"0.75487214",
"0.75103533",
"0.7401055",
"0.73957694",
"0.7349331",
"0.7305159",
"0.72884786",
"0.72884786",
"0.7268701",
"0.7227226",
"0.7222338",
"0.7222335",
"0.7191638",
"0.7166401",
"0.71475893",
"0.71243656",
"0.7115571",
"0.7111751",
"0.70247895",
"0.7020549",
"0.6898012",
"0.689242",
"0.6849233",
"0.68467665",
"0.681805",
"0.68051517",
"0.6787592",
"0.6726723",
"0.6726031",
"0.671726",
"0.67123866",
"0.67068183",
"0.6705972",
"0.66573215",
"0.66573215",
"0.66573215",
"0.66573215",
"0.6642034",
"0.66411495",
"0.66051865",
"0.66051865",
"0.6591033",
"0.65569633",
"0.6550391",
"0.6516132",
"0.6513793",
"0.65125245",
"0.6496571",
"0.6471135",
"0.6448",
"0.6416469",
"0.6405407",
"0.6396818",
"0.6396818",
"0.6394613",
"0.63927925",
"0.63767153",
"0.63548535",
"0.6351683",
"0.6351683",
"0.6351683",
"0.63460684",
"0.6328358",
"0.6314954",
"0.6311734",
"0.63069975",
"0.6306205",
"0.63049704",
"0.62969667",
"0.627017",
"0.6265833",
"0.6240211",
"0.62344736",
"0.62036395",
"0.6201198",
"0.6192995"
] | 0.72321993 | 33 |
Programmatically hides this box, without going through any necessary states. | def hide
@state = STATE_HIDDEN
if @anchor == ANCHOR_TOP || @anchor == ANCHOR_LEFT
@slide_offset = extent * -@slide_direction - 1
else
@slide_offset = 0
end
setup_gui
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hide!\n visible(false)\n end",
"def hide!\n visible(false)\n end",
"def hide\n @visible = false\n self\n end",
"def hide!\n self[:hidden] = true\n self\n end",
"def hide\n self.visible = false\n clear_dmg_preview\n end",
"def hide; end",
"def hide; end",
"def hide\n #return unless visible? # added 2011-10-14 these 2 are not behaving properly\n Ncurses::Panel.hide_panel @panel.pointer\n #Ncurses.refresh # wnoutrefresh\n Ncurses::Panel.update_panels # added so below window does not need to do this 2011-10-1 \n @visible = false\n end",
"def hide_secret\n @is_showing_secret = false\n # TODO secret should be using a JPanel or something, not a dialog.\n end",
"def hidden!\n @hidden = true\n self\n end",
"def hide\n self.visible = false\n self.active = false\n Input.text_input = false if $MKXP\n end",
"def hide\n @peer.hide\n end",
"def hide\n ShowWindow.call(@handle, SW_HIDE)\n end",
"def hide(position)\n row, col = position\n @secret_grid[row][col].hide\n end",
"def hide_hid!\n @hide_hid = true\n end",
"def hide\n call Screen.setColor(false)\n call draw\n end",
"def hide\n super\n\n Vedeu.buffers.by_name(name).hide\n end",
"def hide\n @sel.visible = false\n @typeInd.visible = false\n @background.y += (@background.bitmap.height/8)\n @megaButton.y += 12\n for i in 0...@nummoves\n @button[\"#{i}\"].x -= ((i%2 == 0 ? 1 : -1)*@viewport.width/16)\n end\n @showMega = false\n @megaButton.src_rect.x = 0\n end",
"def hide(val = true)\n show_in_details !val\n show_in_table !val\n editable !val\n end",
"def unhide\n client.post('/api/unhide', id: read_attribute(:name))\n end",
"def hide(id)\n\t\tset_attribute_property(id, 'style', DISPLAY, NONE)\n\tend",
"def hide_panel\n FFI::NCurses.hide_panel(@pointer)\n end",
"def hide_object\n @location.opacity = 0\n @circlel.opacity = 0\n end",
"def hide()\n @view__.hide\n end",
"def hide_panel\n Panel.hide_panel(pointer)\n end",
"def hide\n client.post('/api/hide', id: read_attribute(:name))\n end",
"def hide\n set_publish_state(Event::DRAFT_STATE)\n end",
"def hide?\n @hide\n end",
"def toggle\n style[:hidden] = !style[:hidden]\n update_visibility\n end",
"def hide(&block)\n visible block ? proc { false == (instance_eval &block) } : false\n end",
"def hidden?\n @field.widget.hidden?\n end",
"def hide\n \tupdate_attribute(:hidden, true)\n end",
"def hide\n Control.functions[__method__] ||= AU3_Function.new(\"ControlHide\", 'SSS', 'L')\n res = Control.functions[__method__].call(@title.wide, @text.wide, @c_id.wide)\n raise_unfound if res == 0\n nil\n end",
"def unhide\n post(\"/api/unhide\", id: fullname)\n end",
"def hidden?\n false\n end",
"def hidden?\n false\n end",
"def hidden?\n false\n end",
"def hide_window\n end",
"def hidden?\n not visible\n end",
"def unhide!\n update_attribute(:hidden_at, nil)\n end",
"def unhide!\n update_attribute(:hidden_at, nil)\n end",
"def unhide!\n update_attribute(:hidden_at, nil)\n end",
"def hide_as(arg = nil, &proc) # :yields: field_def or nil\n @view_control.hide_as(arg, &proc)\n end",
"def hide_frame_state(graph)\n graph.nodes.each_value do |node|\n if FRAME_STATE_NODES.include?(node.props.dig(:node_class, :node_class))\n node.props[:hidden] = true\n end\n end\n end",
"def hideLayer(layer)\n\t\tlayer['style'] = setting(DISPLAY, NONE)\n\tend",
"def hide_subnet\n render :update do |page|\n page.visual_effect :toggle_blind, 'subnet'\n end\n end",
"def hide_elements\n true\n end",
"def hide_floating(graph)\n graph.nodes.each_value do |node|\n if node.edges.none? { |e| e.props[:kind] == 'control' }\n node.props[:hidden] = true\n end\n end\n end",
"def hidden?()\n not visible?()\n end",
"def hideSection(frameH, hideB, label, binB, isHidden)\n if !isHidden\n frameH.backColor = FXRGB(176, 176, 176)\n frameH.borderColor = FXRGB(106, 106, 106)\n label.backColor = FXRGB(176, 176, 176)\n label.textColor = FXRGB(56, 56, 56)\n binB.backColor = FXRGB(176, 176, 176)\n hideB.backColor = FXRGB(176, 176, 176)\n isHidden = true\n else\n frameH.backColor = FXRGB(212, 208, 200)\n frameH.borderColor = FXRGB(0, 0, 0)\n label.backColor = FXRGB(212, 208, 200)\n label.textColor = FXRGB(0, 0, 0)\n binB.backColor = FXRGB(212, 208, 200)\n hideB.backColor = FXRGB(212, 208, 200)\n isHidden = false\n end\n\n return isHidden\n end",
"def hide\n @hidden = 1\n\n # A hidden worksheet shouldn't be active or selected.\n @selected = 0\n set_activesheet(0)\n set_firstsheet(0)\n end",
"def hideElements \n @hideElements.each do |element|\n element.hide\n end\n end",
"def hide(*stuff)\n hide_methods(self, [Object], *stuff)\n end",
"def disable_input_box\n hc_pomodoro_input_box.setHidden(true)\n summary_widget.show\n summary_widget.update_yesterday_count(@pomodori_controller.yesterday_pomodoros.size)\n summary_widget.update_today_count(@pomodori_controller.today_pomodoros.size)\n summary_widget.update_average_count(@pomodori_controller.average_pomodoros)\n end",
"def show\n style[:hidden] = false\n update_visibility\n end",
"def forFriendfunctions\n @status = false\n self.hide\n showElements\n end",
"def unhide app\n app.perform :unhide\n end",
"def hide(locator)\n execute_script(%(\n arguments[0].style.display = 'none';\n ), find_element(locator))\n end",
"def hide\n @notice.display(false)\n end",
"def hideElement=(value)\n\t\t\t@hideElement = value\n\t\tend",
"def hideElement=(value)\n\t\t\t@hideElement = value\n\t\tend",
"def default_hide(for_view_control = @view_control)\n if this_field = get_field_defs_field(for_view_control)\n this_field.hide_proc.call(current_object)\n else\n \"\"\n end\n end",
"def hide\n ConsoleGlitter.escape(\"?25l\")\n end",
"def hide_keyboard\n self.get.sendAction(:resignFirstResponder, to:nil, from:nil, forEvent:nil)\n end",
"def invisible(stream = $stdout)\n stream.print(hide)\n yield\n ensure\n stream.print(show)\n end",
"def pbHideAllDataboxes(side = nil)\n return if @dataBoxesHidden\n # remove databox visibility\n @battlers.each_with_index do |b, i|\n next if !b || (!side.nil? && i%2 != side)\n @sprites[\"dataBox_#{i}\"].visible = false\n end\n @dataBoxesHidden = true\n end",
"def hideL(layer_label)\n\t\thideLayer(getLayer(layer_label))\n\tend",
"def hidden?(*)\n false\n end",
"def hideOffer()\n @item['offer']['activate'] = false\n self.updateOffer()\n end",
"def initialize\r\n clear\r\n @visible = false\r\n end",
"def show; @showing = false; end",
"def hide_scrollbars\n set_overflow(OVERFLOW_HIDDEN)\n end",
"def hide\n @hidd = 1\n\n # A hidden worksheet shouldn't be active or selected.\n @selecte = 0\n @activesheet = 0\n @firstsheet = 0\n end",
"def as_hidden(attrs=nil, only_initial=false)\n as_widget(@field.hidden_widget, attrs, only_initial)\n end",
"def hide_info_windows\n @info_window.smooth_move(Graphics.width, @info_window.y)\n @equip_window.smooth_move(Graphics.width, @equip_window.y)\n @keys_window.smooth_move(0, Graphics.height)\n end",
"def panel_hidden\n FFI::NCurses.panel_hidden(@pointer) == 0\n end",
"def setHideBehind _obj, _args\n \"_obj setHideBehind _args;\" \n end",
"def hide_uoc_info_form\n logger.info 'Hiding the Use of Collections Info form'\n wait_for_element_and_click form_show_hide_button('Use of Collections Information')\n end",
"def disown(w)\n super(w)\n DOM.changeToStaticPositioning(w.getElement)\n end",
"def hide_element(element)\n execute_script(\"arguments[0].style.display='none'\", element)\n end",
"def hidden?; self[:hidden] ||= false; end",
"def is_hidden?\n\t\tself.hidden\n\tend",
"def toggle\n if style![:display] == :none\n show\n else\n hide\n end\n end",
"def make_hidden\n @linkage_vis = :hidden\n end",
"def setHideWindowUi(value)\n @fields['hide_window_ui'] = value\n self\n end",
"def setHideWindowUi(value)\n @fields['hide_window_ui'] = value\n self\n end",
"def setHideWindowUi(value)\n @fields['hide_window_ui'] = value\n self\n end",
"def panel_hidden\n Panel.panel_hidden(pointer) == 0\n end",
"def setHidden(index)\n if @layerArray[index] == false\n @layerArray[index] = true\n else\n @layerArray[index] = false\n end\n @canvas.update\n end",
"def forbidding(*args, &block)\n\t\t\t\t\tself.instruction.forbidding *args, &block\n\t\t\t\tend",
"def initially_hide_button\n @hidden_submit = true\n end",
"def hide(cidr)\n self.execute(['--local', 'hide', cidr])\n end",
"def invisible?\n false\n end",
"def invisible?\n false\n end",
"def hide!\n update_attribute(:hidden_at, Time.now)\n end",
"def hide!\n update_attribute(:hidden_at, Time.now)\n end",
"def hide_unless(condition)\n hide(:unless => condition)\n end",
"def Edit_HideBalloonTip(hwnd) send_edit_message(hwnd, :HIDEBALLOONTIP) end",
"def unhide\n @goal = Mg::Goal.find(params[:id])\n @goal.update_attribute(:is_hidden, false)\n flash[:notice] = \"Goal #{@goal.name} has been restored.\"\n \n respond_to do |format|\n format.html { redirect_to mg_goals_url }\n format.xml { head :ok }\n end\n end",
"def invisible?\n false\n end"
] | [
"0.7594872",
"0.7594872",
"0.7587589",
"0.7535907",
"0.72000504",
"0.70249903",
"0.70249903",
"0.6931334",
"0.68570924",
"0.68235856",
"0.67397046",
"0.6721017",
"0.6718066",
"0.670836",
"0.66130656",
"0.65533435",
"0.6549156",
"0.652437",
"0.6493507",
"0.6486117",
"0.64640206",
"0.64601916",
"0.6411459",
"0.64031214",
"0.63721025",
"0.6358848",
"0.6278293",
"0.6255558",
"0.6254792",
"0.6254297",
"0.62407416",
"0.6227766",
"0.62243193",
"0.61693174",
"0.614797",
"0.614797",
"0.61339414",
"0.61053085",
"0.60802263",
"0.60678846",
"0.60678846",
"0.60678846",
"0.6062844",
"0.6055283",
"0.6049353",
"0.60406774",
"0.60400164",
"0.6025041",
"0.60206443",
"0.5972241",
"0.59642136",
"0.59019464",
"0.589312",
"0.58862096",
"0.5862078",
"0.5851801",
"0.5839903",
"0.5812248",
"0.580208",
"0.57956636",
"0.57956636",
"0.57907206",
"0.57891434",
"0.57828087",
"0.57748175",
"0.5766692",
"0.5757083",
"0.5752791",
"0.5750238",
"0.57393396",
"0.5735723",
"0.5731383",
"0.57308227",
"0.57072085",
"0.5706767",
"0.57000715",
"0.5697544",
"0.5688902",
"0.5687568",
"0.5677287",
"0.56757045",
"0.5675169",
"0.5668849",
"0.5649248",
"0.5587927",
"0.5587927",
"0.5587927",
"0.55762124",
"0.5559738",
"0.5559532",
"0.55470276",
"0.551203",
"0.55054617",
"0.55054617",
"0.5504293",
"0.5504293",
"0.55001503",
"0.54970014",
"0.5483808",
"0.5476799"
] | 0.68795395 | 8 |
Subclasses should override this to place things in the slider, set its rect w/h and then call up to this method, as this will set the rect x/y | def setup_gui
x = x_for_anchor(@anchor).to_i
y = y_for_anchor(@anchor).to_i
translate_to(x,y)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_position\n # Calculate values\n target_pos = @direction.object_origin(@target) - @min # Retrieve target ox / oy\n bar_size = @background_sprite.height - @button_slider_sprite.height\n base = @direction.object_position(@background_sprite)\n value = base + (target_pos / (@max - @min)) * bar_size.to_f\n value = base if value.nan? || value < base\n value = base + bar_size if value > base + bar_size\n # Set the slider position\n @direction.set_object_position @button_slider_sprite, value\n end",
"def drawrect(*)\n super\n end",
"def update_position\n @curr.set_to(@parent.center)\n self.width = @scale_to_length \\\n ? @parent.curr_length * @rect_scale \\\n : @parent.rest_length * @rect_scale\n self.height = @rect_height\n self.radian = @parent.radian\n end",
"def set_rect(width, height)\n @width, @height = width, height\n zoom_x, zoom_y = (@width.to_f/@image_w.to_f)*@scale[0], (@height.to_f/@image_h.to_f)*@scale[1]\n @background.zoom_x, @background.zoom_y = zoom_x, zoom_y\n if @frame then @frame.zoom_x, @frame.zoom_y = zoom_x, zoom_y end\n end",
"def sliderSetPosition _obj, _args\n \"_obj sliderSetPosition _args;\" \n end",
"def sliderSetRange _obj, _args\n \"_obj sliderSetRange _args;\" \n end",
"def item_rect(*args, &block)\n rect = super(*args, &block)\n rect.y += [(rect.height - 98) / 2, 0].max\n rect.height = 98\n rect\n end",
"def set_rect\n if character\n x_rect, y_rect = self.x-self.ox*self.zoom_x, self.y-self.oy*self.zoom_y\n w_rect, h_rect = self.src_rect.width*self.zoom_x, self.src_rect.height*self.zoom_y\n character.rect.set(x_rect, y_rect, w_rect, h_rect)\n end\n end",
"def update_placement\n self.x = (Graphics.width - self.width) / 2\n self.y = (Graphics.height - self.height) / 2\n end",
"def set(*args)\n self.x, self.y, self.w, self.h = *Rect.extract(args.singularize)\n self\n end",
"def prepare_slider(element)\n renderables = []\n\n # The slider background\n renderables << create_rect_renderable(element.w, element.h, element_color)\n\n # Ensure the location of the cursor in the slider bar has been initialized\n element.cursor_pos ||= [element.x, element.y]\n\n # Determine the value of the slider and the position of its renderable\n slider_width = element.continuous ? self.slider_size : (element.w / element.values.size)\n slider_height = element.h\n\n slider_extremes = [slider_width / 2.0, element.w - (slider_width / 2.0)]\n total_slider = element.w - slider_width\n\n clamped_pos = [[element.cursor_pos[0].to_f, slider_extremes.min].max, slider_extremes.max].min\n position_ratio = (clamped_pos - slider_extremes.min) / total_slider\n\n if element.continuous\n value_range = element.values.max - element.values.min\n\n element.current_value = (value_range * position_ratio) + element.values.min\n else\n max_index = element.values.size - 1\n current_index = (max_index * position_ratio).round\n position_ratio = (current_index.to_f / max_index)\n\n element.current_value = element.values[current_index]\n end\n\n # The slider bar\n slider_pos = [(total_slider * position_ratio).to_i, 0]\n renderables << create_offset_rect_renderable(slider_width, slider_height, slider_pos[0], slider_pos[1], element_color)\n renderables.concat(create_border(slider_pos[0], slider_pos[1], slider_width, slider_height))\n\n # Add the slider extrema value labels\n add_left_aligned_text(element, element.values.min.to_s) if element.current_value != element.values.min\n add_right_aligned_text(element, element.values.max.to_s) if element.current_value != element.values.max\n add_text_at(element, slider_pos[0], slider_pos[1], slider_width, element.h, element.current_value.to_s)\n\n element.add_renderables(renderables)\n end",
"def fillrect(*)\n super\n end",
"def set_position(x, y)\n @background.set_position(x, y)\n @bar.set_position(x + @bx, y + @by)\n end",
"def update_slot_position\n @screenshot.x = @x\n @screenshot.y = @y\n @info_rect.x = @x\n @info_rect.y = @y\n if @loading_bar != nil\n @loading_bar.x = @x + 30\n @loading_bar.y = @y + 80\n end\n end",
"def update_size\n @max_x = @glade['drawingarea'].allocation.width - 1\n @max_y = @glade['drawingarea'].allocation.height - 1\n @glade['xvalue'].set_range(1,@max_x)\n @glade['yvalue'].set_range(1,@max_y)\n end",
"def initialize(x, y, w, h, ch)\n super(x, y, w, h)\n @bg = EH.sprite(\"gui/container_background\")\n @scrollbar = Scrollbar.new(x+w-24, y, 24, h)\n @ch = ch\n @items = []\n @item = nil\n @changed = false\n end",
"def resize width, height\n @widgets[@index].width = width\n @widgets[@index].height = height\n @widgets[@index].repaint\n end",
"def update_capture_rect\n @zoom = [100, @zoom].max\n tx, ty = @zoom_target_x, @zoom_target_y\n f = @zoom / 100.0\n w = (Graphics.width / f).to_i\n h = (Graphics.height / f).to_i\n x = (tx - w / 2.0).to_i.bound(0, Graphics.width - w)\n y = (ty - h / 2.0).to_i.bound(0, Graphics.height - h)\n @capture_rect.set(x, y, w, h)\n end",
"def item_rect(index)\r\n rect = super\r\n rect.x = index * (item_width + spacing)\r\n rect.y = 0\r\n rect\r\n end",
"def item_rect(index)\n rect = super\n rect.x = index * (item_width + spacing)\n rect.y = 0\n rect\n end",
"def x=(value)\n rect.x = value\n end",
"def call\n r = (@identity[2].to_f / @identity[1].to_f).ceil\n c = (@identity[2] % @identity[1]).zero? ? @identity[1] : @identity[2] % @identity[1]\n @ymax = 1 - (1.to_f / @identity[0]) * (r - 1) - 0.095 / @identity[0]\n @ymin = 1 - (1.to_f / @identity[0]) * r + 0.095 / @identity[0]\n @xmin = (1.to_f / @identity[1]) * (c - 1) + 0.095 / @identity[1]\n @xmax = (1.to_f / @identity[1]) * c - 0.095 / @identity[1]\n\n @x_axis_padding = Math.log((@x_range[1] - @x_range[0]), 10).round\n @y_axis_padding = Math.log((@y_range[1] - @y_range[0]), 10).round\n\n @origin[0] = @x_range[0] - @x_axis_padding if @origin[0] == :default\n @origin[1] = @y_range[0] - @y_axis_padding if @origin[1] == :default\n\n SetViewPort.new(@xmin, @xmax, @ymin, @ymax).call\n SetWindow.new(@x_range[0] - @x_axis_padding, @x_range[1] + @x_axis_padding,\n @y_range[0] - @y_axis_padding, @y_range[1] + @y_axis_padding).call\n # Make sure that window is set bigger than range figure out how to manage it\n SetTextAlign.new(2, 0).call\n @text_font = :times_roman if @text_font == :default\n SetTextFontPrecision.new(GR_FONTS[@text_font],\n GR_FONT_PRECISION[:text_precision_string]).call\n SetCharHeight.new(0.012).call\n @y_tick_count = 10 if @y_tick_count == :default\n @x_tick_count = 10 if @x_tick_count == :default # 10 ticks by default\n SetLineColorIndex.new(hex_color_to_gr_color_index(COLOR_INDEX[:black])).call\n SetLineWidth.new(1).call\n SetLineType.new(GR_LINE_TYPES[:solid]).call\n Grid.new((@x_range[1] - @x_range[0]).to_f / @x_tick_count,\n (@y_range[1] - @y_range[0]).to_f / @y_tick_count,\n 0, 0, 1, 1).call\n Axes.new((@x_range[1] - @x_range[0]).to_f / @x_tick_count,\n (@y_range[1] - @y_range[0]).to_f / @y_tick_count,\n @origin[0], @origin[1], 1, 1, 0.01).call\n AxesTitles.new(@x_title, @y_title,'').call\n @tasks.each do |task|\n task.call() if task.plot_type == :robust\n task.call(self) if task.plot_type == :lazy\n end\n end",
"def set_dimensions(*dims)\n if dims[0].is_a?(Rect)\n rect = dims[0]\n elsif dims[0].is_a?(Integer) && dims[1].is_a?(Integer) && dims[2].is_a?(Integer) && dims[3].is_a?(Integer)\n rect = Rect.new(dims[0],dims[1],dims[2],dims[3])\n else\n raise ArgumentError, 'invalid dimension(s) for ApplicationWindow: ' + dims.inspect\n end\n self.x = rect.x\n self.y = rect.y\n self.width = rect.width\n self.height = rect.height\n end",
"def initialize(x, y, width)\r\n super(x, y, width, window_height)\r\n @actor = nil\r\n refresh\r\n end",
"def setWidgetPosition(w, left, top)\n checkWidgetParent(w)\n DOM.setAbsolutePixelPosition(w.getElement, left, top)\n end",
"def resize_event event\n @widget.size = event.size\n scene.scene_rect =\n Qt::RectF.new(0, 0, event.size.width, event.size.height)\n end",
"def rect; end",
"def rect; end",
"def setBounds\n true\n end",
"def draw\n @parent.mask(:y) do \n parent_box = @parent.bounds \n @parent.bounds = self \n @parent.y = absolute_top\n @action.call \n @parent.bounds = parent_box\n end\n end",
"def update\n @box.top += 2\n @box.left -= 2\n @box.bottom += 2\n @box.right -= 2\n end",
"def y=(value)\n rect.y = value\n end",
"def rect x, y, w, h, c, fill = false\n screen.draw_rect x, self.h-y-h, w, h, color[c], fill\n end",
"def set_collision_rect(x, y, width, height)\r\r\n @collisionbox = Rect.new(x, y, width - 1, height - 1)\r\r\n end",
"def define_cursor_rect\n cursor_rect.set(-4, @index * default_line_height, cursorskin.width, cursorskin.height)\n end",
"def bottom\n self.ox = self.src_rect.width/2\n self.oy = self.src_rect.height\n end",
"def refreshCurrentValues()\n @curX = @object.x\n @curY = @object.y\n @curWidth = @object.width\n @curHeight = @object.height\n end",
"def set_at(x, y)\n self.x = x\n self.x = 0 if x < 0\n self.y = y\n if x + self.width > 640\n self.x -= (x + self.width - 640)\n end\n if y + self.height > 480\n self.y -= (y + self.height - 480)\n end\n end",
"def rect=(rectangle); end",
"def scrollable_rect\n Rect.new(0, 0, self.width * 32, self.height * 32)\n end",
"def initialize(args=nil)\n super(args)\n @track = Box.new(name:'track', parent:self)\n @bar = Box.new(name:'bar', parent:self, y:start, drag: @style[:drag])\n Draggable << @bar\n update_drag_restriction\n end",
"def initialize(args=nil)\n super(args)\n @track = Box.new(name:'track', parent:self)\n @bar = Box.new(name:'bar', parent:self, x:start, drag: @style[:drag])\n Draggable << @bar\n update_drag_restriction\n end",
"def set_size(w=nil, h=nil)\n @window_width = w if w.is_a?(Numeric) && w > 0 # TODO: > min_width\n @window_height = h if h.is_a?(Numeric) && h > 0 # TODO: > min_height\n super(@window_width, @window_height)\nend",
"def initialize(x, y)\n super(x, y, window_width, window_height)\n @pending_index = -1\n refresh\n end",
"def set(*args)\n if (a = args[0]).is_a? Rect\n @x, @y, @width, @height = a.x, a.y, a.width, a.height\n else\n @x, @y, @width, @height = *args\n end\n compute\n self\n end",
"def setviewport(*)\n super\n end",
"def set(x, y, width, height)\n @x = x\n @y = y\n @width = width\n @height = height \n end",
"def set(low, high, value, box)\n self.setLowHigh(low, high)\n self.setValue(value)\n self.setBox(box)\n end",
"def initialize\n super(0, 0, window_width, window_height) \n @item = nil\n @max = 1\n @number = 1\n @cursor_y = 0\n end",
"def setDimensions\n @intTargetX = @canvasRefrance.getCanvasWidth\n @intTargetY = @canvasRefrance.getCanvasHeight\n end",
"def set(x, y, width, height)\n @x = x\n @y = y\n @width = width\n @height = height\n end",
"def initialize(x, y)\r\n super(x, y, window_width, window_height)\r\n @pending_index = -1\r\n refresh\r\n end",
"def Edit_SetRect(hwndCtl, lprc) send_edit_message(hwndCtl, :SETRECT, lparam: lprc) end",
"def sliderPosition _args\n \"sliderPosition _args;\" \n end",
"def set_position(l=nil, t=nil)\n @window_left = l ||= @window_left\n @window_top = t ||= @window_top\n super(l, t)\n return self\nend",
"def setCenter(center)\n # center.x -> bounds.size.width, center.y -> bounds.size.height\n @target.bounds = CGRectMake(0, 0, center.x, center.y)\n end",
"def update_size(x,y)\n @width = x if x > @width\n @height = y if y > @height\n end",
"def initialize(window, rect, enemy, align=0, opacity=255, valign=0, resize_mode=nil, \n active=true, visible=true)\n super(active, visible)\n self.enemy = enemy\n @cEnemyGraphic = CResizableImage.new(window, rect, \n nil, nil, align, opacity, valign, resize_mode, \n active, visible)\n end",
"def y=(value)\n super\n update_background\n end",
"def update\n super\n # Scroll\n if self.active\n scroll_max = [self.contents.width - (self.width - 32), 0].max\n if Input.press?(Input::Y)\n self.ox = [self.ox - (self.width - 32), 0].max\n elsif Input.press?(Input::Z)\n self.ox = [self.ox + (self.width - 32), scroll_max].min\n end\n end\n end",
"def draw_rect(x,y,w,h,c)\n self.draw_quad(x,y,c,x+w,y,c,x,y+h,c,x+w,y+h,c)\n end",
"def update\n super\n update_bitmap # Update HP Graphic\n update_screen # Update the position the graphic should be displayed\n end",
"def initialize(width, choices, viewport = nil)\n super(viewport)\n @texts = UI::SpriteStack.new(self)\n @choices = choices\n @colors = Array.new(@choices.size, get_default_color)\n @index = $game_temp ? $game_temp.choice_start - 1 : 0\n @index = 0 if @index >= choices.size || @index < 0\n lock\n self.width = width if width\n @autocalc_width = !width\n self.cursorskin = RPG::Cache.windowskin(CursorSkin)\n define_cursor_rect\n self.windowskin = RPG::Cache.windowskin(current_windowskin)\n # Should be set at the end of the important ressources loading\n self.window_builder = current_window_builder\n self.active = true\n unlock\n @my = Mouse.y\n end",
"def initialize(window, rect, color1, color2=color1, vertical=false, \n active=true, visible=true)\n super(window, active, visible)\n self.rect = rect\n self.color1 = color1\n self.color2 = color2\n self.vertical = vertical\n end",
"def create_button_slider(cach_button_bmp)\n @button_slider_sprite = push 0, 0, nil\n @button_slider_sprite.set_bitmap(cach_button_bmp).set_rect_div(0, 0, 1, 2)\n end",
"def initialize(x, y, width, height, actor)\n super(x, y, width, height)\n @cBackCharImage = CResizableImage.new(self, Rect.new(0, 0, self.contents.width, self.contents.height), \n nil, nil, 0, 255, 2, 3)\n window_update(actor)\n end",
"def height=(height) @side = height end",
"def initialize(window, rect, img_bitmap, src_rect, align=0, opacity=255, \n valign=0, active=true, visible=true)\n super(window, active, visible)\n self.rect = rect\n self.img_bitmap = img_bitmap\n self.src_rect = src_rect\n self.align = align\n self.opacity = opacity\n self.valign = valign\n end",
"def set_x_y(x,y)\r\n @x=x\r\n @y=y\r\n self\r\n end",
"def initialize(x, y, label = \"\")\n super(x, y, window_width, window_height)\n refresh(label)\n end",
"def item_rect_for_cursor(index)\n rect = item_rect(index)\n rect.x += 6\n rect.y = 0\n rect.height = fitting_height(0) + 6\n rect.width = 148\n rect\n end",
"def draggable(renderer, event_handler_registry, handle_w, handle_h, region_rect, ui_state, &on_change)\n handle_x = ui_state[:handle_x] || 0\n handle_y = ui_state[:handle_y] || 0\n if !(ui_state[:pressed])\n evh = { type: :mouse_down, rect: Rect.new(handle_x, handle_y, handle_w, handle_h), callback: proc { |_ev|\n if !(ui_state[:pressed])\n ui_state[:pressed] = true\n yield(ui_state) if on_change\n true\n else\n false\n end\n } }\n event_handler_registry.register_event_handler(evh)\n else\n evh2 = { type: :mouse_move, callback: proc { |ev|\n if ui_state[:pressed] == true\n new_handle_x = (ui_state[:handle_x] || 0) + ev.xrel\n new_handle_x = region_rect.x if new_handle_x < region_rect.x\n new_handle_x = region_rect.x2 if new_handle_x > region_rect.x2\n\n new_handle_y = (ui_state[:handle_y] || 0) + ev.yrel\n new_handle_y = region_rect.y if new_handle_y < region_rect.y\n new_handle_y = region_rect.y2 if new_handle_y > region_rect.y2\n\n ui_state[:handle_x] = new_handle_x\n ui_state[:handle_y] = new_handle_y\n\n yield(ui_state) if on_change\n true\n else\n false\n end\n } }\n\n evh1 = { type: :mouse_up, callback: proc { |_ev|\n if ui_state[:pressed] == true\n ui_state[:pressed] = false\n yield(ui_state) if on_change\n true\n else\n false\n end\n } }\n\n event_handler_registry.register_event_handler(evh1)\n event_handler_registry.register_event_handler(evh2)\n end\nend",
"def update_pos( dt )\n @px += @vx * dt\n @py += @vy * dt\n @rect.center = [@px, @py]\n end",
"def setScaleShiftByBoundaryBox(x0,y0,x1,y1)\n @device.setScaleShiftByBoundaryBox(x0,y0,x1,y1) ;\n end",
"def bottom!\n self.ox = self.width/2\n self.oy = self.height\n end",
"def initialize(window, rect, volumeLabel, bar_color1, bar_color2, bar_back_color,\n bar_border, bar_border_color,\n active=true, visible=true)\n super(active, visible)\n \n # Determine rectangles to position controls\n rects = determine_rects(rect)\n \n @cVolumeLabel = CLabel.new(window, rects[0], volumeLabel, 0, Font.bold_font)\n @cVolumeLabel.active = active\n @cVolumeLabel.visible = visible\n \n @ucVolumeBar = UCBar.new(window, rects[1], bar_color1, bar_color2,\n bar_back_color, 0, 100, bar_border, bar_border_color)\n @ucVolumeBar.active = active\n @ucVolumeBar.visible = visible\n \n @cMuteLabel = CLabel.new(window, rects[2], Vocab::system_mute_volume_label, 1)\n @cMuteLabel.active = active\n @cMuteLabel.visible = visible\n \n @cVolumeValue = CLabel.new(window, rects[3], 0, 1)\n @cVolumeValue.active = active\n @cVolumeValue.visible = visible\n end",
"def initialize(window, rect, volumeLabel, bar_color1, bar_color2, bar_back_color,\n bar_border, bar_border_color,\n active=true, visible=true)\n super(active, visible)\n \n # Determine rectangles to position controls\n rects = determine_rects(rect)\n \n @cVolumeLabel = CLabel.new(window, rects[0], volumeLabel, 0, Font.bold_font)\n @cVolumeLabel.active = active\n @cVolumeLabel.visible = visible\n \n @ucVolumeBar = UCBar.new(window, rects[1], bar_color1, bar_color2,\n bar_back_color, 0, 100, bar_border, bar_border_color)\n @ucVolumeBar.active = active\n @ucVolumeBar.visible = visible\n \n @cMuteLabel = CLabel.new(window, rects[2], Vocab::system_mute_volume_label, 1)\n @cMuteLabel.active = active\n @cMuteLabel.visible = visible\n \n @cVolumeValue = CLabel.new(window, rects[3], 0, 1)\n @cVolumeValue.active = active\n @cVolumeValue.visible = visible\n end",
"def item_rect(index)\n rect = super\n rect.y = index / col_max * (item_height + standard_padding)\n rect\n end",
"def draw_rect x, y, w, h, c\n @screen.fill_rect x, y, w, h, c\n @screen.fill_rect x + 1, y + 1, w - 2, h - 2, 0x000000\n end",
"def update_screen\n self.x = screen_x unless self.disposed?\n self.y = screen_y unless self.disposed?\n end",
"def update_screen\n self.x = screen_x unless self.disposed?\n self.y = screen_y unless self.disposed?\n end",
"def setwsviewport(*)\n super\n end",
"def initialize(window, victory_item, rect, spacing=8,\n active=true, visible=true)\n super(active, visible)\n @victory_item = victory_item\n \n # Determine rectangles to position controls\n rects = determine_rects(rect, spacing)\n \n @ucIcon = UCIcon.new(window, rects[0], victory_item.item.icon_index)\n @ucIcon.active = active\n @ucIcon.visible = visible\n \n @cItemName = CLabel.new(window, rects[1], victory_item.item.name)\n @cItemName.active = active\n @cItemName.visible = visible\n @cItemName.cut_overflow = true\n \n @cItemNumber = CLabel.new(window, rects[2], \n sprintf(VICTORY_CONFIG::ITEM_NUMBER_PATTERN, \n victory_item.quantity), 2)\n @cItemNumber.active = active\n @cItemNumber.visible = visible\n end",
"def update\n super\n\n @status_text.text = \"#{self.mouse_x.to_i} / #{self.mouse_y.to_i}\"\n\n @text.text = @selected_game_object.to_s\n\n #\n # We got a selected game object and the left mouse button is held down\n #\n if @left_mouse_button && @selected_game_object && @selected_game_object.class != Background\n selected_game_objects.each do |selected_game_object|\n selected_game_object.x = self.mouse_x + selected_game_object.options[:mouse_x_offset]\n selected_game_object.y = self.mouse_y + selected_game_object.options[:mouse_y_offset]\n\n if @snap_to_grid\n #Adapted setting to snap object to center of grid cell\n selected_game_object.x -= -16 + (selected_game_object.x % @grid[0])\n selected_game_object.y -= -16 + (selected_game_object.y % @grid[1])\n end\n end\n elsif @left_mouse_button\n if defined?(self.previous_game_state.viewport)\n self.previous_game_state.viewport.x = @left_mouse_click_at[0] - $window.mouse_x\n self.previous_game_state.viewport.y = @left_mouse_click_at[1] - $window.mouse_y\n end\n end\n\n if inside_window?($window.mouse_x, $window.mouse_y)\n scroll_right if $window.mouse_x > $window.width - @scroll_border_thickness\n scroll_left if $window.mouse_x < @scroll_border_thickness\n scroll_up if $window.mouse_y < @scroll_border_thickness\n scroll_down if $window.mouse_y > $window.height - @scroll_border_thickness\n end\n end",
"def mouse_x=(value); self.set_mouse_x(value); end",
"def initialize(window, rectLabel, rectValue, label, value, active=true, \n visible=true)\n super(active, visible)\n @cLabel = CLabel.new(window, rectLabel, label)\n @cLabel.active = active\n @cLabel.visible = visible\n @cValue = CLabel.new(window, rectValue, value)\n @cValue.active = active\n @cValue.visible = visible \n end",
"def rect\n get.rect\n end",
"def y=(y)\n super(y)\n @selectable_window.y = y+@header_height+8\n end",
"def initialize(x, y, width, height)\n super(x, y, width, height)\n self.index = 0\n self.active = true\n @category = :all\n refresh\n end",
"def draw_advanced(rect, item) end",
"def update!(**args)\n @xmax = args[:xmax] if args.key?(:xmax)\n @xmin = args[:xmin] if args.key?(:xmin)\n @ymax = args[:ymax] if args.key?(:ymax)\n @ymin = args[:ymin] if args.key?(:ymin)\n end",
"def update_widget_from_object(obj)\n\t\t@obj = obj\n\t\tif obj\n\t\t\t#@x1.value = obj.x1 #- obj.origin_x\n\t\t\t@x1.set_value_silent(obj.x1)\n\t\t\t@y1.set_value_silent(obj.y1)\n\t\t\t@x2.set_value_silent(obj.x2)\n\t\t\t@y2.set_value_silent(obj.y2)\n\t\t\t#@y1.value = obj.y1 #- obj.origin_y\n\t\t\t#@x2.value = obj.x2 #- obj.origin_x\n\t\t\t#@y2.value = obj.y2 #- obj.origin_y\n\t\t\t#@color.active_id = obj.color.to_s\n\t\t\t#@color.active = obj.color\n\t\t\t@color.set_active_silent(obj.color)\n\t\t\tself.update_attributes\n\t\tend\n\tend",
"def update_classic\n @equip_icon.zoom_x += 0.2 if @equip_icon.zoom_x < 2.0\n @equip_icon.zoom_y += 0.2 if @equip_icon.zoom_y < 2.0\n @scroll_icon.zoom_x += 0.2 if @scroll_icon.zoom_x < 2.0\n @scroll_icon.zoom_y += 0.2 if @scroll_icon.zoom_y < 2.0\n end",
"def bounds\n bounds_dependencies = [absolute_x, absolute_y, calculated_width, calculated_height]\n if bounds_dependencies != @bounds_dependencies\n # avoid repeating calculations\n absolute_x, absolute_y, calculated_width, calculated_height = @bounds_dependencies = bounds_dependencies\n @bounds = org.eclipse.swt.graphics.Rectangle.new(absolute_x, absolute_y, calculated_width, calculated_height)\n end\n @bounds\n end",
"def update_bounds(xs, ys)\n x_min = xs.min\n x_max = xs.max\n y_min = ys.min\n y_max = ys.max\n @left = x_min unless @left && x_min > @left\n @top = y_min unless @top && y_min > @top\n @right = x_max unless @right && x_max < @right\n @bottom = y_max unless @bottom && y_max < @bottom\n nil\n end",
"def place drawable, x,y\n drawn = drawable.draw self\n dst = SDL2::Rect.new x,y, drawn.w,drawn.h\n copy drawn,nil,dst\n end",
"def set_parameters(x, y, w, h)\n @x = x\n @y = y\n @width = w\n @height = h\n end",
"def set_parameters(x, y, w, h)\n @x = x\n @y = y\n @width = w\n @height = h\n end",
"def sliderRange _args\n \"sliderRange _args;\" \n end",
"def setContentArea(x, y, width, height)\n setContentAreaX(x)\n setContentAreaY(y)\n setContentAreaWidth(width)\n setContentAreaHeight(height)\n self\n end"
] | [
"0.6686312",
"0.65477455",
"0.6507212",
"0.6379917",
"0.621754",
"0.618316",
"0.6141478",
"0.6139662",
"0.61041296",
"0.6004107",
"0.59860784",
"0.5912727",
"0.59074444",
"0.58716637",
"0.5793109",
"0.5784767",
"0.5745929",
"0.5740602",
"0.5733178",
"0.5700126",
"0.56826675",
"0.5679409",
"0.5678422",
"0.56435376",
"0.5642432",
"0.56355596",
"0.559827",
"0.559827",
"0.5579384",
"0.5561942",
"0.55484134",
"0.5545472",
"0.5540737",
"0.5530512",
"0.5523259",
"0.55205023",
"0.5486791",
"0.54728645",
"0.5453696",
"0.5452912",
"0.5446218",
"0.54395086",
"0.5435458",
"0.54282165",
"0.5421101",
"0.5412064",
"0.5386518",
"0.5384336",
"0.5383247",
"0.53799826",
"0.5370638",
"0.5368623",
"0.5367065",
"0.53542876",
"0.5348491",
"0.5333677",
"0.53331244",
"0.53327495",
"0.53281814",
"0.5321704",
"0.53198105",
"0.5314831",
"0.52833545",
"0.5282772",
"0.52753",
"0.5263752",
"0.5262367",
"0.5251579",
"0.52464575",
"0.5245237",
"0.52383596",
"0.5235867",
"0.52100617",
"0.52093023",
"0.51965266",
"0.51918805",
"0.51918805",
"0.5190608",
"0.51855326",
"0.518445",
"0.518445",
"0.5165516",
"0.51639223",
"0.51612437",
"0.515874",
"0.5154267",
"0.5150117",
"0.5145836",
"0.51425225",
"0.5138565",
"0.51385486",
"0.5134809",
"0.5134301",
"0.51307136",
"0.51305217",
"0.5128348",
"0.5128137",
"0.5128137",
"0.5122333",
"0.51156265"
] | 0.53128964 | 62 |
Whether or not this is fully showing | def showing?
return true if @state == STATE_DISPLAYED
ext = extent
case(@anchor)
when ANCHOR_TOP
return @slide_offset >= 0
when ANCHOR_LEFT
return @slide_offset >= 0
when ANCHOR_BOTTOM
return @slide_offset <= -ext
when ANCHOR_RIGHT
return @slide_offset <= -ext
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def refresh_view?\n visible? && (ox >= bordered_width || oy >= bordered_height)\n end",
"def visible?\n true\n end",
"def visible?\n @visible\n end",
"def visible?\n @visible\n end",
"def visible?\n\t\t\t@visible\n\t\tend",
"def show_full\n @show_full=true\n end",
"def is_full?\n page.is_full?\n end",
"def visible?; \n\t\t@visible = true if @visible == nil\n\t\t@visible\n\tend",
"def is_visible?\n setup.show_all? || Date.today >= start_at\n end",
"def visible?\n end",
"def is_visible?\n visibility && ( visibility > 0 )\n end",
"def visible?\n @style.display != 'none'\n end",
"def displayed?; end",
"def refresh_view?\n cursor.visible? && cursor.ox >= width || cursor.oy >= height\n end",
"def visible\n return @viewport.visible\n end",
"def visible?\n el.displayed?\n end",
"def visible?\n wd_element.displayed?\n end",
"def visible\n return @bar.visible\n end",
"def just_shown_results?\n return true if get[just_shown_key]\n end",
"def show?\n return true\n end",
"def show; @showing = false; end",
"def show(&block)\n visible block || true\n end",
"def showable?\n hidden? && hidden_origin.blank?\n end",
"def showable?\n hidden? && hidden_origin.blank?\n end",
"def show?\n true\n end",
"def displayed?\n as_page_class.displayed?\n end",
"def splayed?\n splayed\n end",
"def full_display?(flat_answer_data)\n true\n end",
"def visible?\n @dialog ? true : false\n end",
"def visible?\n !value.nil?\n end",
"def show_only?\n now = Time.now.to_i\n now >= end_time && now <= close_time\n end",
"def visible?(sx,sy)\n (sx + @width / 2 > 0 && sx - @width / 2 < Common::SCREEN_X && sy + @height / 2 > 0 &&\n sy - @height / 2 < Common::SCREEN_Y)\n end",
"def visible?\n Waiter.wait_for do\n inst = presence\n !! inst && inst.visible?\n end\n end",
"def visible?\n @a.nonzero?\n end",
"def show?\n true\n end",
"def show?\n true\n end",
"def show?\n true\n end",
"def show?\n true\n end",
"def message_window_showing\n if Input.timed_permanent? and $scene.is_a?(Scene_Battle)\n return false\n else\n return @message_window_showing\n end\n end",
"def displayed?\n static_module.displayed?\n end",
"def displayed?\n return (self.d_publish <= Time.zone.today) && (self.d_remove > Time.zone.today)\n end",
"def show\n self.visible = true\n end",
"def visible?(sx,sy)\n (sx + @width > 0 && sx < Common::SCREEN_X && sy + @height > 0 &&\n sy < Common::SCREEN_Y)\n end",
"def full?\n update_indicator.eql?('F')\n end",
"def show!\n visible(true)\n end",
"def show!\n visible(true)\n end",
"def needs_redraw?\n\t\t@swimming || @party_horn.needs_redraw?\n\tend",
"def third_screen_visible?\n @third_screen_title.visible?\n end",
"def is_full\n self.openings == 0\n end",
"def hidden?\n not visible\n end",
"def cvv_help_displayed?\n cvv_help_div.visible?\n end",
"def full?\n @top == (@size - 1)\n end",
"def full?\n @top == (@size - 1)\n end",
"def full?\n @top == (@size - 1)\n end",
"def full?\n @top == (@size - 1)\n end",
"def full?\n\t\treturn self.debate_participations.count == 2\n\tend",
"def rendered?\n end",
"def none?\n self.display_on == \"none\"\n end",
"def appear\n @inposition = false\n @loaded = true\n end",
"def present?\n exists? && visible?\n end",
"def show?\n return true if user.admin? || is_in_company?(record)\n\n record.information_complete? ? true : false\n end",
"def invisible?\n false\n end",
"def screening?\n self.tip == \"screening\"\n end",
"def is_full()\n \n end",
"def is_hidden?\n\t\tself.hidden\n\tend",
"def full?\n\t\t\t\t@available <= 0\n\t\t\tend",
"def full?\n end",
"def visible?\n parent.nil?\n end",
"def display_featured_works?\n Flipflop.show_featured_works?\n end",
"def visibility?\n @visibility || true\n end",
"def visible?\n assert_exists\n @element.displayed?\n end",
"def visible?\n assert_exists\n @element.displayed?\n end",
"def show_orientation_block?\n instructor? && !instructor_has_completed_orientation? && @current.empty? && @past.empty?\n end",
"def display_bar?\n !@options['debug'] && !@options['silent']\n end",
"def visible\n return @stack[1].visible\n end",
"def invisible?\n false\n end",
"def invisible?\n false\n end",
"def hidden?\n false\n end",
"def visible?\n !(self.created? || self.suspended? || self.deleted?)\n end",
"def visible?\n !(self.created? || self.suspended? || self.deleted?)\n end",
"def visible?\n !(self.created? || self.suspended? || self.deleted?)\n end",
"def rendered?\n !!@render_opts\n end",
"def atlas_visible?\n return quality_checked? && open_access? && geom?\n end",
"def show\n @visible = true\n self\n end",
"def active?\n needs_view || ends_at > Time.now.getutc\n end",
"def splash_displayed?\n displayed? SPLASH_VIDEO\n end",
"def display_square\n\t\t@displayed = true\n\tend",
"def is_showable\n return true unless self.is_hidden or self.is_deleted or self.status == \"tmp\"\n end",
"def full?\n false\n end",
"def second_screen_visible?\n @second_screen_title.visible?\n end",
"def visualizer_set?\n !@visualizer.nil?\n end",
"def visible?() \n if @visible.nil?\n visible = parent.nil? ? true : parent.visible?\n else\n visible = @visible\n end\n visible = instance_eval &visible if visible.kind_of?(Proc)\n visible\n end",
"def isfull?\n binding.pry\n if self.vol_requests > self.signups.length\n binding.pry\n return false\n end\n return true\n end",
"def draw?\n won? == false && full? ? true : false\n end",
"def shown_in_menu?\n false\n end",
"def needs_redraw?\n @needs_redraw\n end",
"def needs_redraw?\n return (@main_window_widget.needs_redraw? or not @main_window_widget.throttle_render?)\n end",
"def complete?\n synchronize do\n (@bars - [@top_bar]).all?(&:complete?)\n end\n end",
"def hidden?\n false\n end",
"def hidden?\n false\n end"
] | [
"0.7353578",
"0.7344829",
"0.73133254",
"0.73133254",
"0.7290243",
"0.7274726",
"0.72310144",
"0.7222399",
"0.7220974",
"0.7199099",
"0.7175825",
"0.7119526",
"0.71025115",
"0.68979704",
"0.6869228",
"0.6859618",
"0.6771067",
"0.6769969",
"0.6710631",
"0.6702703",
"0.6665202",
"0.66554093",
"0.6649743",
"0.6649743",
"0.6647883",
"0.6641066",
"0.6627024",
"0.6608471",
"0.658584",
"0.65775645",
"0.6566062",
"0.6545022",
"0.6541424",
"0.6540903",
"0.65249485",
"0.65249485",
"0.65249485",
"0.65249485",
"0.6519214",
"0.65066224",
"0.65051615",
"0.6492334",
"0.6481984",
"0.6474798",
"0.6470987",
"0.6470987",
"0.6466297",
"0.6465415",
"0.64530426",
"0.64462215",
"0.6436216",
"0.64183354",
"0.64183354",
"0.64183354",
"0.64183354",
"0.64005196",
"0.63951147",
"0.63940513",
"0.6378849",
"0.63768464",
"0.6376038",
"0.6366509",
"0.63648266",
"0.6358557",
"0.63300186",
"0.63254744",
"0.6323024",
"0.6312793",
"0.630804",
"0.63054365",
"0.6305059",
"0.6305059",
"0.63017756",
"0.6284109",
"0.6277641",
"0.626748",
"0.626748",
"0.62636316",
"0.6259291",
"0.6259291",
"0.6259291",
"0.6250104",
"0.62347335",
"0.62336904",
"0.62298477",
"0.62269014",
"0.6223098",
"0.6221777",
"0.62196004",
"0.6213667",
"0.6209899",
"0.62076867",
"0.62048846",
"0.6203512",
"0.6197356",
"0.6189683",
"0.61867875",
"0.61722624",
"0.61653906",
"0.61653906"
] | 0.6550656 | 31 |
Adjusts the scale (and inverse_scale) by the given amount ZOOM_SPEED | def zoom(delay)
self.scale += delay * ZOOM_SPEED
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_zoom\n @effectus_old_zoom_x = @picture.zoom_x\n @effectus_old_zoom_y = @picture.zoom_y\n self.zoom_x = @effectus_old_zoom_x / 100.0\n self.zoom_y = @effectus_old_zoom_y / 100.0\n end",
"def vscale(factor)\n @height = @height * factor\n @top *= factor\n self\n end",
"def zoom(duration, value, force = false)\n return if @zoom && !force\n @zoom = true\n @zoom_duration = duration\n @zoom_total_duration = duration\n @zoom_valuex = (value-self.zoom_x)/(duration/2)\n @zoom_valuey = (value-self.zoom_y)/(duration/2)\n @zoom_ori_zvaluex = self.zoom_x\n @zoom_ori_zvaluey = self.zoom_y\n @zoom_x = self.x\n @zoom_y = self.y\n @zoom_phase = 0\n self.ox = self.width/2\n self.oy = self.height/2\n self.x += self.width/2\n self.y += self.height/2\n end",
"def scale_up(scale)\n self.side_length *= scale\n end",
"def scale(factor_x, factor_y, around_x, around_y, &rendering_code); end",
"def scale(factor_x, factor_y=factor_x, &rendering_code); end",
"def scale\n scale_factor = 20 # template.png height divided by canvas height\n self.image_x *= scale_factor\n self.image_y *= scale_factor\n self.height *= scale_factor\n self.width *= scale_factor\n self.rotation *= 180 / Math::PI\n end",
"def zoom_factor\n Zif.position_math(:fdiv, [@cur_w, @cur_h], zoom_unit)\n end",
"def hscale(factor)\n @width *= factor\n @left *= factor\n self\n end",
"def scale( scale_x, scale_y = scale_x )\n dup.scale!(scale_x, scale_y)\n end",
"def update_classic\n @equip_icon.zoom_x += 0.2 if @equip_icon.zoom_x < 2.0\n @equip_icon.zoom_y += 0.2 if @equip_icon.zoom_y < 2.0\n @scroll_icon.zoom_x += 0.2 if @scroll_icon.zoom_x < 2.0\n @scroll_icon.zoom_y += 0.2 if @scroll_icon.zoom_y < 2.0\n end",
"def scale= scale\n protected_use_method(MM::Scaling, :@scale, scale)\n end",
"def scale(value)\r\n value * @height/2 + @height/4\r\n end",
"def scale(by)\n @x = @x * by\n @y = @y * by\n @z = @z * by\n self\n end",
"def zoom(factor = 5)\n upstream_slice = self.class.sketch.slices.select{|s| s.start_cumulative_bp < @start_cumulative_bp}.sort_by{|s| s.start_cumulative_bp}[-1]\n downstream_slice = self.class.sketch.slices.select{|s| s.start_cumulative_bp > @start_cumulative_bp}.sort_by{|s| s.start_cumulative_bp}[0]\n\n center_bp = (@start_cumulative_bp + @length_bp.to_f/2).round\n\n @length_bp = (@length_bp.to_f/factor).round\n @start_cumulative_bp = (center_bp - @length_bp.to_f/2).round\n @stop_cumulative_bp = (center_bp + @length_bp.to_f/2 - 1).round\n @resolution = @length_pixel.to_f/@length_bp\n self.fetch_sequence\n\n upstream_slice.stop_cumulative_bp = @start_cumulative_bp - 1\n downstream_slice.start_cumulative_bp = @stop_cumulative_bp + 1\n [upstream_slice, downstream_slice].each do |s|\n s.length_bp = s.stop_cumulative_bp - s.start_cumulative_bp + 1\n s.resolution = s.length_pixel.to_f/s.length_bp\n s.range_cumulative_bp = Range.new(s.start_cumulative_bp, s.stop_cumulative_bp)\n s.fetch_sequence\n end\n self.class.sketch.slices.each{|s| s.format_resolution}\n\n self.class.sketch.buffer_images[:zoomed] = self.class.sketch.draw_zoomed_buffer\n self.class.sketch.buffer_images[:information_panel] = self.class.sketch.draw_information_panel\n end",
"def zoom(percent)\n set RGhost::Scale.zoom(percent)\n end",
"def adjust_y_to_zoom(value)\n return (value * $game_temp.temp_zoom_value) + $game_temp.temp_zoom_adj_y - ($game_temp.center_adj_y / 2)\n end",
"def zoom_to_level(new_level)\n zoom_to_scale ZOOM_SCALE[new_level]\n end",
"def scales\n \n end",
"def update_controller_scaling(view)\n\n @scale_origin =\n if @path.empty?\n @ip.position\n else\n bb = Geom::BoundingBox.new\n bb.add @path\n bb.center\n end\n\n nil\n\n end",
"def scale(*amount)\n self.dup.scale! *amount\n end",
"def scale_by num\n min = 0.001\n num = min if num < min and num > -min\n self.x = self.x / num\n self.y = self.y / num\n self\n end",
"def scale=(val)\n self['scale'] = val\n end",
"def zoom(scale)\n execute_script(%(document.body.style.zoom = arguments[0];), scale)\n end",
"def setscale(*)\n super\n end",
"def sub_scale _value, _abs=0\n send_cmd(\"sub_scale #{_value} #{_abs}\")\n end",
"def scale!(type, size)\n @transforms << \"#{SCALE_TYPES[type]}#{size}\"\n self\n end",
"def bw_zoom(transition_sprite)\n 60.times do\n transition_sprite.zoom_x = (transition_sprite.zoom_y *= 1.005)\n Graphics.update\n end\n 30.times do\n transition_sprite.zoom_x = (transition_sprite.zoom_y *= 1.01)\n transition_sprite.opacity -= 9\n Graphics.update\n end\n transition_sprite.bitmap.dispose\n transition_sprite.dispose\n end",
"def set_zoom(scale = 100)\n # Confine the scale to Excel's range\n if scale < 10 or scale > 400\n # carp \"Zoom factor $scale outside range: 10 <= zoom <= 400\";\n scale = 100\n end\n\n @zoom = scale.to_i\n end",
"def scale!(rhs)\n scale rhs, self\n end",
"def recalculate!(scale)\n neighbor = @coordinate.zoom_to(zoom - 1)\n parent = neighbor.container\n\n col_shift = 2 * (neighbor.column - parent.column)\n row_shift = 2 * (neighbor.row - parent.row)\n\n @offset.x -= scale * tile_width * col_shift\n @offset.y -= scale * tile_height * row_shift\n @coordinate = parent\n end",
"def scale(xscale, yscale = xscale)\n current_transformation.scale(xscale, yscale)\n self[\"transform\"] = current_transformation.to_s\n end",
"def scale_to(new_size)\n scale_to(new_size, background:nil)\n end",
"def scaled_to(destination)\n Scale.transform(self).to(destination)\n end",
"def update_scale\n heightpx = @board.height*@scale\n widthpx = @board.width*@scale\n # Gameboard\n @board_window.height = heightpx\n @board_window.width = widthpx\n # Clue windows\n @clues_windows[:rows].height = heightpx\n @clues_windows[:rows].width = (@scale*@board.clues[:rows].map { |row| row.length }.max)\n @clues_windows[:columns].height = (@scale*@board.clues[:columns].map { |column| column.length }.max)\n @clues_windows[:columns].width = widthpx\n # Clues\n @clues_list.each { |clue| clue[:text_object].delete() }\n @passing.each { |pass| pass.remove() }\n @passing = draw_passing(@board.clues)\n @clues_list = draw_clues(@board.clues)\n # Blocks\n @blocks.each do |block, cell|\n x = block.coords[:x]*@scale\n y = block.coords[:y]*@scale\n cell.coords = [x, y, x+@scale, y+@scale]\n end\n # Guide lines\n @guide_lines.each { |line| line.remove }\n @guide_lines = draw_guide_lines()\n update_highlight()\n end",
"def modify_image\n if @vertical\n @main_image = @main_image.zooming_v\n else\n @main_image = @main_image.zooming_h\n end\n end",
"def zoom(x_factor, y_factor = x_factor)\n unary_transform(\"zoom\", x_factor.to_i, y_factor.to_i)\n end",
"def zoom_2\n return 1.0\n end",
"def scale\n raise NotImplementedError, \"Subclass responsibility\"\n end",
"def zoom=(val)\n self.zoom_x = val\n self.zoom_y = val\n end",
"def zoom=(val)\n self.zoom_x = val\n self.zoom_y = val\n end",
"def scale(sx,sy)\n set RGhost::Scale.new(sx,sy)\n \n end",
"def zoom_to_scale(new_scale)\n @map_state.merge!('Scale' => new_scale)\n update_session do |xml|\n set_map_state xml\n end\n \n TileCache.clear(@session_id)\n end",
"def scale_factor(point, view)\n\n px_to_length(view)/view.pixels_to_model(1, point)\n\n end",
"def scaling\n @scaling || 0.0\n end",
"def scale_by(width_factor, height_factor, &block)\n squish(width*width_factor, height*height_factor, &block)\n end",
"def scale(value)\n @tileset.scale = value\n end",
"def setScaleShiftByBoundaryBox(x0,y0,x1,y1)\n @device.setScaleShiftByBoundaryBox(x0,y0,x1,y1) ;\n end",
"def scale factor\n Vector.new factor * @x, factor * @y, factor * @z\n end",
"def for_zoom_liveness\n for_liveness_type(Constants::ZOOM)\n end",
"def rescale\r\n unless self.is_si?\r\n return self.to_base_unit.rescale\r\n end\r\n scale=Math.log10(self.value)+self.unit.scale\r\n\r\n unit_scales=self.class.si_unit_scales.sort\r\n\r\n if scale<unit_scales[0][0]\r\n return self.send unit_scales[0][1].name\r\n end\r\n if scale>=unit_scales.last[0]\r\n return self.send unit_scales.last[1].name\r\n end\r\n unit_scales.each_cons(2) do |us|\r\n if us[0][0]<=scale && us[1][0]>scale\r\n return self.send us[0][1].name\r\n end\r\n end\r\n end",
"def scale!( scale_x, scale_y = scale_x )\n raise \"can't modify frozen object\" if frozen?\n @x, @y = @x * scale_x, @y * scale_y\n @hash = nil\n self\n end",
"def zoom_1\n return 1.0\n end",
"def move\n @speed = map1d(total_count, (5..25), (0.1..0.4))\n @speed = (0..10.0).clip speed\n @position[Y] += speed\n @position[Y] = -height if position[Y] > height * 2\n end",
"def scale_to_length(new_length)\n self.scale(new_length / length)\n end",
"def scale(factor)\n Point.new(self.x * factor, self.y * factor)\n end",
"def set_speed(input_speed)\n @speed = input_speed * MULTIPLIER\n end",
"def scale_degrees\n \n end",
"def scale=(scaling)\n @scale.set(scaling)\n @transform_dirty = true\n scaling\n end",
"def scaleTo2 (target_dims_meters)\n bb = Sketchup.active_model.bounds \n car_height = bb.depth\n min_mirror_z = 0.50 * car_height # mirrors are definitely higher than this\n\n adjusted_car_width = 0 # init with large number\n adjusted_car_min = 0\n for part in Sketchup.active_model.definitions\n max_corner = part.bounds.max\n min_corner = part.bounds.min\n # \"max_corner.z > 0\" -- a hack to remove some exhilary shit\n if max_corner.z > 0 and max_corner.z < bb.depth * min_mirror_z\n adjusted_car_width = [max_corner.y.to_m, adjusted_car_width].max\n #puts part.name, max_corner, adjusted_car_width\n adjusted_car_min = [min_corner.y.to_m, adjusted_car_min].min\n #puts part.name, max_corner, adjusted_car_width\n end\n end\n raise 'adjusted_car_width < 0' if adjusted_car_width == 0\n puts \"adjusted max: #{adjusted_car_width}, min: #{adjusted_car_min}\"\n #puts \"car_width: #{bb.height.to_m}, adjusted: #{adjusted_car_width}\"\n\n # here's what we want to have\n target_car_width = target_dims_meters[1]\n # we only use car width (y) as the most reliable (no extra stuff on sides)\n scale = target_car_width / adjusted_car_width\n origin = Geom::Point3d.new 0,0,0\n transform = Geom::Transformation.scaling origin, scale\n entities = Sketchup.active_model.entities\n #entities.transform_entities(transform, entities.to_a)\n\nend",
"def scale_and_translate(outline, width, height, scale, location)\r\n tr = Geom::Transformation.scaling(height * scale, height * scale,0)\r\n outline.collect!{|pt|\r\n pt.transform!(tr)\r\n pt[0] = pt[0] + width * scale - height * scale if pt[0] > height * scale/2\r\n pt\r\n }\r\n tr = Geom::Transformation.translation([location[0] * scale, location[1] * scale])\r\n outline.collect{|pt| pt.transform(tr)}\r\n end",
"def inqscale\n inquiry_int { |pt| super(pt) }\n end",
"def scaleimage **opts\n Vips::Image.scale self, **opts\n end",
"def scale(scale_val, objectify = false)\n scaled = Array.new(@n) { Array.new(@m) }\n 0.upto(@n - 1) do |i|\n 0.upto(@m - 1) do |j|\n scaled[i][j] = scale_val * @matrix[i][j]\n end\n end\n return VectorMatrix.new(nil, nil, nil, scaled) if objectify\n scaled\n end",
"def resize(width,height)\n\t\t@buffer=@buffer.scale(width, height, :bilinear)\n\tend",
"def set_print_scale(scale = 100)\n # Confine the scale to Excel's range\n if scale < 10 or scale > 400\n # carp \"Print scale $scale outside range: 10 <= zoom <= 400\";\n scale = 100\n end\n\n # Turn off \"fit to page\" option\n @fit_page = 0\n\n @print_scale = scale.to_i\n end",
"def init_zoom\r\n self.zoom = 1 # $zoom_factor\r\n @zoom = PSDK_CONFIG.specific_zoom || ZoomDiv[1] # $zoom_factor.to_i]\r\n end",
"def scale_to_fill(new_size, position:position, scale:scale)\n new_size = SugarCube::CoreGraphics::Size(new_size)\n my_size = self.size\n if new_size.width == my_size.width && new_size.height == my_size.height && self.scale == scale\n return self\n end\n\n # first, scale down; then we'll scale back up if we went too far\n if my_size.width > new_size.width\n my_size.height *= new_size.width / my_size.width\n my_size.width = new_size.width\n end\n\n if my_size.height > new_size.height\n my_size.width *= new_size.height / my_size.height\n my_size.height = new_size.height\n end\n\n if my_size.width < new_size.width\n my_size.height *= new_size.width / my_size.width\n my_size.width = new_size.width\n end\n\n if my_size.height < new_size.height\n my_size.width *= new_size.height / my_size.height\n my_size.height = new_size.height\n end\n\n if self.size.width == my_size.width && self.size.height == my_size.height\n return self\n end\n\n if position.is_a?(Symbol)\n min_x = 0\n min_y = 0\n max_x = my_size.width\n max_y = my_size.height\n mid_x = max_x / 2\n mid_y = max_y / 2\n case position\n when :top_left, :topleft\n position = CGPoint.new(min_x, min_y)\n when :top\n position = CGPoint.new(mid_x, min_y)\n when :top_right, :topright\n position = CGPoint.new(max_x, min_y)\n when :left\n position = CGPoint.new(min_x, mid_x)\n when :center\n position = CGPoint.new(mid_x, mid_x)\n when :right\n position = CGPoint.new(max_x, mid_x)\n when :bottom_left, :bottomleft\n position = CGPoint.new(min_x, max_y)\n when :bottom\n position = CGPoint.new(mid_x, max_y)\n when :bottom_right, :bottomright\n position = CGPoint.new(max_x, max_y)\n else\n raise \"Unknown position #{position.inspect}\"\n end\n else\n position = SugarCube::CoreGraphics::Point(position)\n end\n thumbnail_x = position.x * (new_size.width - my_size.width) / my_size.width\n thumbnail_y = position.y * (new_size.height - my_size.height) / my_size.height\n\n UIGraphicsBeginImageContextWithOptions(new_size, false, scale)\n thumbnail_rect = CGRectZero\n thumbnail_rect.origin = [thumbnail_x, thumbnail_y]\n thumbnail_rect.size = my_size\n\n self.drawInRect(thumbnail_rect)\n\n new_image = UIGraphicsGetImageFromCurrentImageContext()\n UIGraphicsEndImageContext()\n\n raise \"could not scale image\" unless new_image\n\n return new_image\n end",
"def zoom\n end",
"def updateExpBar\n return if self.disposed?\n @sprites[\"exp\"].zoom_x = @showexp ? self.exp : 0\n end",
"def scale(name)\n \n end",
"def update_position\n self.x = @battler.screen_x\n self.y = @battler.screen_y - (oy * zoom_y)\n self.z = @battler.screen_z\n end",
"def scale(w, h, method = :bilinear)\n @image.send(\"resample_#{method}!\", w, h)\n self\n end",
"def move_and_scale_up(cell)\n cell_clone = cell.clone\n cell_clone.x += grid.width + 1\n scale_up(cell_clone)\n end",
"def set_map_initial_zoom(city_size)\n case city_size\n when \"l\"\n 10 \n when \"m\" \n 11\n when \"s\" \n 12\n else \n 9\n end\n end",
"def scale!(*amount)\n x, y, z = amount.extract_vector3i!\n\n self.x, self.y, self.z = self.x*x, self.y*y, self.z*z\n self\n end",
"def update_circle(circle)\n circle.opacity -= 10\n circle.zoom_x += 0.04\n circle.zoom_y += 0.04\n if circle.zoom_x > 3\n circle.opacity = 255\n circle.zoom_x = 0.0\n circle.zoom_y = 0.0\n end\n end",
"def update(point)\n\t\tsuper(point)\n\t\t\n\t\t# simple ratio solution courtesy of this link\n\t\t# http://tech.pro/tutorial/691/csharp-tutorial-font-scaling\n\tend",
"def scale( value )\n ( value - @min ) / ( @max - @min )\n end",
"def scale(scale_x, scale_y, center_x = nil, center_y = nil)\n args = [scale_x, scale_y, center_x, center_y].compact\n @canvas << js_method('scale', *args)\n return self\n end",
"def fix_animation_sprites\n return unless $imported[:TSBS_Camera]\n return unless @ani_sprites && @animation\n if (@animation.position == 3 && !@anim_top == -1) || @anim_top == 1\n z_val = self.z + Graphics.height\n elsif @anim_top == -1\n z_val = 3\n else\n z_val = self.z + 3\n end\n frame = @animation.frames[frame_index]\n cell_data = frame.cell_data\n @ani_sprites.each_with_index do |sprite, i|\n next unless sprite\n next unless sprite.visible\n sprite.z = z_val + i\n next unless camera_animation_zoom?\n sprite.zoom_x = (cell_data[i, 3] / 100.0) * $tsbs_camera.zoom\n sprite.zoom_y = (cell_data[i, 3] / 100.0) * $tsbs_camera.zoom\n end\n end",
"def update_position\r\n set_position(@character.screen_x / @zoom, @character.screen_y / @zoom)\r\n self.z = @character.screen_z(@ch) + @add_z\r\n return true\r\n end",
"def ctrlSetScale _obj, _args\n \"_obj ctrlSetScale _args;\" \n end",
"def setscalefactors3d(*)\n super\n end",
"def scale(f)\n @x *= f\n @y *= f\n self\n end",
"def scale(*args)\n r = Rect.new x, y, w, h\n r.resolution = r.resolution * Vector2[args.singularize]\n r\n end",
"def set_fixed_scale(vmin,vmax,divisions=5,vxmin=0,vxmax=0,xdivisions=5)\n @vmin = vmin\n @vmax = vmax\n @divisions = divisions\n #TODO check\n #if (!vxnin == 0 )\n if (vxnin != 0 )\n @vxmin = vxmin\n @vxmax = vxmax\n @xdivisions = xdivisions\n end\n end",
"def scale\n self['scale']\n end",
"def update_capture_rect\n @zoom = [100, @zoom].max\n tx, ty = @zoom_target_x, @zoom_target_y\n f = @zoom / 100.0\n w = (Graphics.width / f).to_i\n h = (Graphics.height / f).to_i\n x = (tx - w / 2.0).to_i.bound(0, Graphics.width - w)\n y = (ty - h / 2.0).to_i.bound(0, Graphics.height - h)\n @capture_rect.set(x, y, w, h)\n end",
"def reset_page_scale_factor\n {\n method: \"Emulation.resetPageScaleFactor\"\n }\n end",
"def scale_to_fill(new_size, scale: scale)\n scale_to_fill(new_size, position: :center, scale: scale)\n end",
"def update(width, height)\n # Update velocity\n @velocity += @acceleration\n # Limit speed\n @velocity.set_mag(MAX_SPEED) { @velocity.mag > MAX_SPEED }\n @location += @velocity\n # Reset acceleration to 0 each cycle\n @acceleration *= 0\n @location.x = constrain(location.x, 0, width)\n @location.y = constrain(location.y, 0, height)\n end",
"def scale_by_bounds(dimensions)\n x = options[:width] / dimensions[0].to_f\n y = options[:height] / dimensions[1].to_f\n x * dimensions[1] > options[:height] ? y : x\n end",
"def scale_to_fill(new_size, position:position)\n scale_to_fill(new_size, position: position, scale: self.scale)\n end",
"def scale(factor)\n x = x2 - x1\n y = y2 - y1\n Line.new(p1, Point.new(x1 + (x * factor), y1 + (y * factor)))\n end",
"def set_speed(new_speed:)\n if new_speed > 30\n new_speed = 30\n elsif new_speed < 0\n new_speed = 0\n end\n @curr_speed = new_speed \n end",
"def clip_speed\n @speed = [@speed, @top_speed].min\n end",
"def increase_speed(delta=10)\n @speed += delta\n end",
"def zoom\n options[:zoom] || Config.zoom\n end",
"def scale_bilinear(*args)\n @source = BilinearScaler.new(@source, *args)\n self\n end"
] | [
"0.6357948",
"0.63213265",
"0.61937875",
"0.59902763",
"0.590808",
"0.58783174",
"0.5777762",
"0.577419",
"0.5753107",
"0.57381606",
"0.570375",
"0.5629697",
"0.5622397",
"0.5600802",
"0.5596528",
"0.55393696",
"0.5528985",
"0.54692817",
"0.5465162",
"0.5460557",
"0.54478043",
"0.54445183",
"0.5440548",
"0.5406406",
"0.53915447",
"0.53605264",
"0.5357015",
"0.5356862",
"0.534421",
"0.5340913",
"0.5337397",
"0.52916944",
"0.52862656",
"0.527147",
"0.52606356",
"0.5254781",
"0.52300656",
"0.520549",
"0.5190459",
"0.519029",
"0.519029",
"0.51886696",
"0.51804906",
"0.51577437",
"0.51572543",
"0.5153802",
"0.5131395",
"0.5119436",
"0.5116945",
"0.5082796",
"0.5080666",
"0.5076509",
"0.5072129",
"0.50627524",
"0.505939",
"0.50524133",
"0.50450176",
"0.502468",
"0.5013615",
"0.49969515",
"0.49904594",
"0.49899483",
"0.496702",
"0.49594855",
"0.49569178",
"0.49445957",
"0.48983768",
"0.4893923",
"0.4885613",
"0.48690867",
"0.48666748",
"0.4817246",
"0.48112524",
"0.48038352",
"0.48022828",
"0.47903934",
"0.47784355",
"0.47775242",
"0.47688225",
"0.47607055",
"0.47603026",
"0.47584686",
"0.47567108",
"0.47533244",
"0.47526723",
"0.47475275",
"0.47472432",
"0.47310573",
"0.4724428",
"0.47202706",
"0.47179288",
"0.4697848",
"0.46929243",
"0.46860304",
"0.46778366",
"0.46768996",
"0.46711668",
"0.46687663",
"0.46657252",
"0.46637878"
] | 0.703119 | 0 |
Register a lock to be renewed every extend_interval seconds. | def register(lock, extend_interval)
registration = Registration.new
registration.lock = lock
registration.mutex = Mutex.new
registration.thread = Thread.current
registration.acquired_at = Time.now
registration.extend_interval = extend_interval
registration.released = false
@locks_mutex.synchronize do
locks << registration
end
registration
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extend_lock_if_expires_in_less_than(duration)\n extend_lock if duration > remaining_lock_time\n end",
"def extend_lock!\n Resque.redis.expire(key, timeout)\n end",
"def lock_interval= value\n put_settings(get_settings.tap {|s| s[:lock_interval] = value})\n end",
"def lock_duration=(interval)\n @lock_duration = Core::Utils::Interval.try_convert(interval)\n end",
"def lock_interval\n get_settings[:lock_interval]\n end",
"def relock_every\n @relock_every ||= 5\n end",
"def extend_locks\n # Make a local copy of the locks array to avoid accessing it outside of the mutex.\n locks_copy = @locks_mutex.synchronize { locks.dup }\n locks_copy.each { |registration| extend_lock(registration) }\n @locks_mutex.synchronize do\n locks.delete_if(&:released)\n end\n end",
"def extend(time)\n self.expires_on += time\n self\n end",
"def refresh_lock(ttl)\n check_exists\n SideJob.redis.expire \"#{redis_key}:lock\", ttl\n end",
"def increase_interval\n @interval = if @attempts.zero?\n 1\n elsif @attempts == 1\n 6\n else\n (@interval * @e_factor).to_i\n end\n\n increase_attempts\n end",
"def lock_expired?; end",
"def lock(duration = nil)\n @bridge.lock(duration)\n end",
"def lock_for(duration)\n @locked = true\n @mutex.synchronize { sleep duration }\n @locked = false\n end",
"def retry_later(interval, opts = {})\n is_error = opts.to_h.fetch(:is_error, true)\n\n redis.write(\n gid,\n retries: is_error ? retries + 1 : retries,\n http_request: http_request,\n schedule_time: (Time.now + interval).to_i,\n queue: queue,\n dispatch_deadline: dispatch_deadline\n )\n redis.sadd(self.class.key, [id])\n end",
"def attemptlock(node, lockstring, duration)\n self.locktype = \"\" if (self.locktype.nil?)\n \n lockset = self.locktype.split(' ') \n # logger.debug(\"data_product.attemptlock, lockset[0]=#{lockset[0]}, expired=#{self.lockexpired?} \")\n if (lockset.empty? || self.lockexpired? || (!lockset[0].eql?(\"read\") && !lockset[0].eql?(\"write\")) )\n self.locktype = \"#{lockstring} #{node}\"\n self.save!\n else\n # if it's a read lock, we can extend the expiration and add the new lock\n if lockset[0].eql?(\"read\") && lockstring.eql?(\"read\")\n logger.debug(\" extending read lock\")\n lockset.push(node)\n self.locktype = lockset.join(' ')\n self.lockexpire = duration.seconds.from_now\n self.save!\n end\n end\n self\n end",
"def add_timeout(interval, &block)\n @timer.add(interval, &block)\n end",
"def start_expiry_period!\n self.update_attribute(:access_token_expires_at, Time.now + Devise.timeout_in)\n end",
"def lock_for(duration)\n @mutex.synchronize { sleep duration }\n end",
"def generate_lock\n lock = 'EXTENDEDPROTOCOL'\n [lock + ('ABC' * 6), 'ABCD' * 4]\n end",
"def lock_timeout(duration:)\n add option: \"-lock-timeout=#{duration}\"\n end",
"def login_lock!\n config = sorcery_config\n attributes = {\n config.lock_expires_at_attr_name => Time.current + config.login_lock_time_period,\n config.unlock_token_attr_name => self.class.generate_random_token\n }\n sorcery_orm_adapter.update_attributes(attributes)\n\n if config.unlock_token_mailer_disabled || config.unlock_token_mailer.nil?\n return\n end\n\n send_unlock_token_email!\n end",
"def obtain_lock(name, lock_time)\n lock_end_time = (Time.now.to_f + lock_time + 1)\n redis.set(name, lock_end_time, {ex: lock_time, nx: true})\n end",
"def register\n self.ring_server.write(@tuple, @renewer)\n nil\n end",
"def inc_time_slot\n # ex. with 4 bits -> we want range 1..15, \n @lock.synchronize{@current_slot = (@current_slot % @max_slot) + 1}\n end",
"def lock_timeout; end",
"def start_expiry_period!\n self.update_attribute(:access_token_expires_at, 2.days.from_now)\n end",
"def lock_timeout_retry_delay; end",
"def lock(name, mode)\n yield\n end",
"def reload_lock\n @reload_lock ||= Concurrent::ReadWriteLock.new\n end",
"def enable_lock\n add option: \"-lock=true\"\n end",
"def lock_duration\n to_interval description['LockDuration']\n end",
"def extend_timer\n if(@user.timer.mode == 'manual')\n current_time = params[:current_time]\n new_time = current_time.to_i + @user.config.encounter_extend_duration.to_i\n @timer.current_time = new_time\n @timer.save\n return {new_time: new_time}\n end\n end",
"def onTimeout(interval, &block)\n \n raise ArgumentError unless interval.kind_of? Numeric\n \n ref = {:interval => interval.to_i, :block => block}\n \n with_mutex do \n if @queue.empty?\n @queue << ref\n else \n @queue.each.with_index do |v, i|\n if v[:interval] >= interval\n v[:interval] -= interval\n @queue.insert(i, ref) \n break\n else\n ref[:interval] -= v[:interval] \n if @queue.last == v\n @queue << ref\n break\n end\n end\n end\n end \n @update.push ref \n end\n \n ref\n end",
"def loner_lock_after_execution_period\n @loner_lock_after_execution_period || 0\n end",
"def lock_instance\n @lock_instance ||= lock_class.new(item, after_unlock_hook, @redis_pool)\n end",
"def start_cache_renew_job\n\t\tThread.new do\n\t\t\tloop do\n\t\t\t\tupdate_cache\n\t\t\t\tsleep(CONFIG[\"cache_interval\"]*60) #the cache is valid for this period of time\n\t\t\tend\n\t\tend\n\tend",
"def acquire_lock(opts)\n opts = check_params(opts,[:duration_sec,:comment])\n super(opts)\n end",
"def register(mutex)\n locks.add(mutex)\n end",
"def interval duration, &block\n `setInterval(function() { #{block.call} }, duration * 1000)`\n end",
"def refresh(amount: 30.minutes)\n self[:expires_at] = expires_at + amount\n end",
"def lock(key, expiration)\n timeout = (expiration * 1000).to_i\n if @redis.set(\"SimpleRedisLock:#{key}\", Time.now.strftime('%Y-%m-%d %H:%M:%S.%L %z'), nx: true, px: timeout)\n if block_given?\n begin\n yield\n ensure\n release key\n end\n else\n true\n end\n end\n end",
"def lock=(value)\n doc['lock'] = value\n end",
"def periodically(interval, &block)\n EventMachine::PeriodicTimer.new(interval, &block)\n end",
"def periodically(interval, &block)\n EventMachine::PeriodicTimer.new(interval, &block)\n end",
"def open!\n @ban_until = (Time.now + @interval_time).utc\n end",
"def lock_timeout=(_arg0); end",
"def extend_document_expiration(guid)\n post \"/api/documents/#{guid}/extend_expiration.xml\", {}\n end",
"def create_or_renew_token()\n calculate_expiry_time()\n end",
"def lock_duration\n Core::Utils::Interval.try_convert(@lock_duration)\n end",
"def do_LOCK(req, res)\n end",
"def refresh_lock\n if locked?\n @store.transaction do\n @store[:lockdiscovery].last[:timeout] = timeout_node\n end\n\n fetch_lock_info\n else\n refresh_ancestor_locks @ancestor_path, @ancestors.dup\n end\n end",
"def activate_exemption\n self.registered_on = Date.today\n\n self.expires_on = if transient_registration.is_a? WasteExemptionsEngine::RenewingRegistration\n transient_registration.registration.expires_on +\n WasteExemptionsEngine.configuration.years_before_expiry.years\n else\n Date.today + (WasteExemptionsEngine.configuration.years_before_expiry.years - 1.day)\n end\n\n save!\n end",
"def lock_timeout_retries; end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def test_lock_refresh\n setup_file\n\n lock1 = lock 'file', :timeout => 1000\n assert_in_delta 1000, lock1.timeout, 50\n\n # uncomment sleep to see that refresh does not\n # even reset to 1000\n #sleep 3\n\n response = @request.lock('file', :refresh => true,\n :if => lock1.token, :timeout => 10000)\n assert_equal '200', response.status\n locks = response.lock_discovery.locks\n assert_equal 1, locks.size\n lock2 = locks[lock1.token]\n assert_in_delta 10000, lock2.timeout, 50\n\n # check that propfind shows the lock is refreshed\n response = @request.propfind 'file', 0, :lockdiscovery\n assert_equal '207', response.status\n assert_equal '200', response[:lockdiscovery].status\n locks = response[:lockdiscovery].lock_discovery.locks\n assert_equal 1, locks.size\n lock3 = locks[lock1.token]\n assert_in_delta 10000, lock3.timeout, 100\n\n unlock 'file', lock1.token\n ensure\n teardown_file\n end",
"def password_minutes_of_inactivity_before_lock=(value)\n @password_minutes_of_inactivity_before_lock = value\n end",
"def password_minutes_of_inactivity_before_lock=(value)\n @password_minutes_of_inactivity_before_lock = value\n end",
"def create_periodic_timer(interval, &block)\n Timer.new(self, interval, :periodic => true, &block)\n end",
"def lock!(wait = 0.1)\n until redis.call(\"SETNX\", key[:_lock], lock_timeout) == 1\n next unless lock = redis.call(\"GET\", key[:_lock])\n sleep(wait) and next unless lock_expired?(lock)\n\n break unless lock = redis.call(\"GETSET\", key[:_lock], lock_timeout)\n break if lock_expired?(lock)\n end\n end",
"def test_acquire_lock_with_expiry_handler_class_method\n b = Patient.first\n # Get an ordinary lock for the record\n PessimisticLock.destroy_all\n assert PessimisticLock.acquire([b], 'ordinary_holder', 'testing one time lock', :expiry_handler => 'test expiry handler')\n refute b.acquire_one_time_pessimistic_lock('testing one time lock')\n\n # Expire the lock\n expired = 20.minutes.ago\n PessimisticLock.update_all(updated_at:expired)\n\n # read the lock back before deleting expired\n locks = PessimisticLock.all\n assert_equal 1, locks.length\n lock = locks.first\n assert (expired.to_i - lock.updated_at.to_i).abs < 10 # don't care about +- some seconds\n\n PessimisticLock.delete_expired!\n\n assert_equal lock, PessimisticLock.first # is still there and hasn't canged\n end",
"def lock\n\t\t@lock += 1\n\t\treturn self\n\tend",
"def renew_account_until(account_descr, expiration_time, authorizer)\n if account_descr.is_a?(OMF::SFA::Model::Account)\n account = account_descr\n else\n account = find_account(account_descr, authorizer)\n end\n raise InsufficientPrivilegesException unless authorizer.can_renew_account?(account, expiration_time)\n\n account.open if account.closed?\n account.valid_until = expiration_time\n account.save\n # Ask the corresponding RC to create/re-open an account\n @liaison.create_account(account)\n\n account\n end",
"def with_app_lock( &block )\n # acquire lock_expiration\n ok = with_connection_lock do |locked_self|\n if locked_self.lock_expiration.nil? then\n row.update_all lock_expiration: Time.now + DELTA\n true\n end\n end\n # use and release lock_expiration outside of the connection_lock\n if ok then\n begin\n block.call\n ensure\n row.update_all lock_expiration: nil\n end\n end\n end",
"def update_interval\n self.interval =\n case repetitions\n when 1 then 1\n when 2 then 6\n else\n (repetitions - 1) * easiness_factor\n end\n end",
"def now_and_after(interval, &block); end",
"def register(key, &block)\n delay = Delay.new(executor: :immediate, &block)\n update_data { |h| h.merge(key => delay) }\n self\n end",
"def register_with_backoff(url, entity, iteration, threshold)\n # Checking a greater iteration just to be on the safe side.\n unless iteration > threshold or threshold <= 0 or iteration < 0\n sleep((1.0/2.0*(2.0**iteration - 1.0)).ceil) if iteration > 0\n success, _ = _put(url, entity.to_json)\n unless success # Unless we successfully registered.\n if threshold == iteration\n logger.error(\"Unable to complete registration after #{threshold + 1} attempts\")\n return false\n else\n # Attempt to register again using the exponential backoff.\n logger.warn(\"Unable to complete registration after #{iteration + 1} attempts, Retrying up to #{threshold+1} attempts\")\n register_with_backoff(url, entity, iteration + 1, threshold)\n end\n end\n return true\n end\n false\n end",
"def lock(klass, field, role, only=nil)\n @definitions[klass][field] << { role: role, only: only }\n end",
"def acquire\n return unless @running\n\n @lock.acquire.callback {\n if !@locked\n @onlocked.call if @onlocked\n @locked = true\n end\n\n # Re-acquire lock near the end of the period\n @extend_timer = EM.add_timer(@timeout.to_f * 2 / 3) {\n acquire()\n }\n }.errback { |e|\n if @locked\n # We were previously locked\n @onunlocked.call if @onunlocked\n @locked = false\n end\n\n if e.kind_of?(EM::Hiredis::RedisError)\n err = e.redis_error\n EM::Hiredis.logger.warn \"Unexpected error acquiring #{@lock} #{err}\"\n end\n\n @retry_timer = EM.add_timer(@retry_timeout) {\n acquire() unless @locked\n }\n }\n end",
"def add_periodic_timer(interval, callback=nil, &blk)\n @timers ||= {}\n timer = PeriodicTimer.new(interval,callback,&blk)\n timer.on(:cancelled) do\n @timers.delete(timer)\n end\n @timers[timer.object_id] = timer\n timer.object_id\n end",
"def lock_timeout_retry_delay=(_arg0); end",
"def register_pledge_alert\n if payment_in? && payment_in_changed?\n d1 = (payment_in - 1.day).beginning_of_day\n d2 = payment_in.beginning_of_day\n self.delay(run_at: d1).send_pledge_alert(payment_in, false) if d1 >= Time.current\n self.delay(run_at: d2).send_pledge_alert(payment_in, true) if d2 >= Time.current\n end\n end",
"def subscribe interval=1\n\t\t\treturn if @update_timer\n\n\t\t\tupdate_proc = proc {\n\t\t\t\tupdate do |status, result|\n\t\t\t\t\t@update_timer = EM::Timer.new(interval, update_proc) if @update_timer\n\t\t\t\tend\n\t\t\t}\n\n\t\t\t@update_timer = EM::Timer.new(interval, update_proc)\n\t\tend",
"def every(interval_sec, &block)\n # to allow canceling the periodic timer we need to\n # hand back a reference to it which responds to 'cancel'\n # As this is getting rather complex when allowing for\n # registration before the EM is up and running, we simply throw\n # and exception at this time.\n raise \"Can't handle 'every' registration before the EM is up\" unless EM.reactor_running?\n # if EM.reactor_running?\n # EM.add_periodic_timer(interval_sec, &block)\n # else\n # @deferred << lambda do\n # EM.add_periodic_timer(interval_sec, &block)\n # end\n # end\n t = EM.add_periodic_timer(interval_sec) do\n begin\n block.call(t)\n rescue => ex\n error \"Exception '#{ex}'\"\n debug \"#{ex}\\n\\t#{ex.backtrace.join(\"\\n\\t\")}\"\n end\n end\n t\n end",
"def lock!; end",
"def lock\n self.is_locked = true\n self\n end",
"def expires_in\n @lock_expire.to_f - Time.now.to_f if @lock_expire && owner_ident == @locked_owner_id\n end",
"def lock!\n @locked = true\n end",
"def register!\n @planet.next_raid_at += Cfg.raiding_delay_random\n @planet.raid_arg = generate_arg\n CallbackManager.register_or_update(\n @planet, CallbackManager::EVENT_RAID, @planet.next_raid_at\n )\n @planet.delayed_fire(@planet, EventBroker::CHANGED,\n EventBroker::REASON_OWNER_PROP_CHANGE)\n @planet.save!\n end",
"def lock!\n freeze!\n @locked = true\n self\n end",
"def extend()\n # Get the appointment\n appointment = current_user.appointments.last\n student_id = current_user.id\n tutor_id = appointment.tutor.id\n # Get the sidekiq job\n jids = appointment.jids.split('|')\n jid_reminder = jids[0]\n jid_complete = jids[1]\n job_reminder = Sidekiq::ScheduledSet.new.find_job(jid_reminder)\n job_complete = Sidekiq::ScheduledSet.new.find_job(jid_complete)\n complete_new_time = job_complete.at + Settings.call_extend_time\n reminder_new_time = complete_new_time - Settings.call_speak_reminder_time\n\n if job_complete.reschedule(complete_new_time) &&\n job_reminder.reschedule(reminder_new_time)\n # update the appointment cost and call time\n appointment.update_attribute(:amount, appointment.amount + Settings.call_extend_cost)\n appointment.update_attribute(:tutor_earned, appointment.tutor_earned + Settings.call_extend_earned)\n # notify the student and the tutor\n msg = I18n.t('appointment.conference_room.call_extend', \n time: Settings.call_extend_time)\n MessageBroadcastJob.perform_later(msg, 'notification',\n student_id: student_id,\n tutor_id: tutor_id)\n else\n msg = I18n.t('students.errors.appointment.call_extend')\n MessageBroadcastJob.perform_later(msg, 'notification',\n student_id: student_id)\n end\n end",
"def now_and_every(interval, recur = T.unsafe(nil), &block); end",
"def passcode_minutes_of_inactivity_before_lock=(value)\n @passcode_minutes_of_inactivity_before_lock = value\n end",
"def every(interval, &block)\n Timer.new(self, interval, true, block)\n end",
"def write_lock\n FileUtils.touch(@lock_file)\n end",
"def lock_timeout_retries=(_arg0); end",
"def refresh(expire_timeout=nil)\n if @lock_expire && owner_ident == (lock_full_ident = @locked_owner_id)\n lock_expire = (Time.now + (expire_timeout.to_f.nonzero? || self.expire_timeout)).to_f\n !!if 1 == eval_safe(@eval_refresh, @ns_names, [lock_full_ident, (lock_expire*1000.0).to_i])\n @lock_expire = lock_expire\n end\n else\n false\n end\n end",
"def lock_id(locker_uid, id_to_lock, expire_in_sec = 5)\n now = redis_time\n @con.multi do\n @con.zadd(\"#{@lock_map_key}_z\", (now + expire_in_sec), id_to_lock)\n @con.hset(\"#{@lock_map_key}_h\", id_to_lock, locker_uid)\n end\n end",
"def lock(key, &block)\n\n kl = \"#{key}-lock\"\n\n loop do\n\n break if @redis.setnx(kl, Time.now.to_f.to_s) != false\n # locking successful\n\n #\n # already locked\n\n t = @redis.get(kl)\n\n @redis.del(kl) if t && Time.now.to_f - t.to_f > 60.0\n # after 1 minute, locks time out\n\n sleep 0.007 # let's try to lock again after a while\n end\n\n #@redis.expire(kl, 2)\n # this doesn't work, it makes the next call to setnx succeed\n\n result = block.call\n\n @redis.del(kl)\n\n result\n end",
"def refresh!\n with_lock do\n update_counters!\n send_email_if_needed!\n save!\n end\n self # for chaining\n end",
"def lock!\n @locked = true\n end",
"def refresh_time\n self.update_column( :expires, Time.zone.now + TOKEN_LIFE )\n end",
"def expire(key, seconds, **kwargs); end",
"def refresh_expiry\n self.expires_at = Time.now + ttl\n end",
"def lock(&block)\n # TODO: only use replace strategy when server is executing the lock\n return call_strategy unless (locked_token = locksmith.lock(&block))\n\n locked_token\n end",
"def lock_list\n super\n end",
"def lock\n end",
"def add_interval(xml, options)\n interval = options[:interval]\n return unless interval\n\n xml.tag!('interval') do\n # The measurement of time, in association with the Interval Unit,\n # that is used to define the frequency of the billing occurrences\n xml.tag!('length', interval[:length])\n # The unit of time, in association with the Interval Length,\n # between each billing occurrence\n xml.tag!('unit', interval[:unit].to_s)\n end\n end"
] | [
"0.6693901",
"0.6399278",
"0.6149864",
"0.6066555",
"0.60389113",
"0.5899571",
"0.58183295",
"0.5671344",
"0.55594057",
"0.5495701",
"0.54651004",
"0.54388404",
"0.5399389",
"0.53882456",
"0.5364531",
"0.5355837",
"0.5348644",
"0.52948856",
"0.5271662",
"0.52258074",
"0.5221512",
"0.5196897",
"0.51821786",
"0.51673836",
"0.5159645",
"0.51464814",
"0.5140068",
"0.5126113",
"0.512588",
"0.511193",
"0.5101996",
"0.50894445",
"0.50886935",
"0.50859463",
"0.5061532",
"0.5058487",
"0.5032323",
"0.5014928",
"0.5003608",
"0.4999676",
"0.4995771",
"0.49899188",
"0.49456185",
"0.49456185",
"0.49446175",
"0.4942664",
"0.4933859",
"0.4921339",
"0.49151483",
"0.49093848",
"0.4905411",
"0.48984474",
"0.48956233",
"0.4893816",
"0.4893816",
"0.4893816",
"0.48897338",
"0.48895362",
"0.48895362",
"0.48843005",
"0.48829",
"0.48810837",
"0.48656768",
"0.4865447",
"0.48580602",
"0.48479223",
"0.4841472",
"0.48375463",
"0.48370412",
"0.4836602",
"0.4827904",
"0.48253873",
"0.48244008",
"0.482194",
"0.48127705",
"0.48118278",
"0.48105043",
"0.48099187",
"0.4809568",
"0.48068115",
"0.47969264",
"0.4792737",
"0.4790578",
"0.4788368",
"0.47849494",
"0.47832057",
"0.47817025",
"0.4780976",
"0.47803596",
"0.47729746",
"0.47594538",
"0.4755077",
"0.47520438",
"0.47467944",
"0.47466",
"0.47285673",
"0.47215244",
"0.47212037",
"0.47133148",
"0.47058746"
] | 0.8018224 | 0 |
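
The register method in the record above appends a Registration entry to a shared list while holding a mutex. A minimal self-contained Ruby sketch of that bookkeeping pattern follows; the Registration struct layout, the LockExtender class name, and the 30-second interval are assumptions for illustration, not part of the original record.

    # Sketch only: mirrors the registration bookkeeping shown in the record above.
    Registration = Struct.new(:lock, :mutex, :thread, :acquired_at,
                              :extend_interval, :released)

    class LockExtender
      def initialize
        @locks_mutex = Mutex.new   # guards the shared registry
        @locks = []                # every lock currently scheduled for renewal
      end

      attr_reader :locks

      def register(lock, extend_interval)
        registration = Registration.new(lock, Mutex.new, Thread.current,
                                        Time.now, extend_interval, false)
        @locks_mutex.synchronize { @locks << registration }
        registration
      end
    end

    extender = LockExtender.new
    reg = extender.register(:db_lock, 30)   # ask for renewal roughly every 30 s
    puts reg.acquired_at                    # timestamp recorded at registration
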
Extend all currently registered locks that have been held longer than the extend_interval since they were last acquired or extended. If any locks have expired (which should not happen), they are released. | def extend_locks
# Make a local copy of the locks array to avoid accessing it outside of the mutex.
locks_copy = @locks_mutex.synchronize { locks.dup }
locks_copy.each { |registration| extend_lock(registration) }
@locks_mutex.synchronize do
locks.delete_if(&:released)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extend_lock_if_expires_in_less_than(duration)\n extend_lock if duration > remaining_lock_time\n end",
"def extend_lock!\n Resque.redis.expire(key, timeout)\n end",
"def register(lock, extend_interval)\n registration = Registration.new\n registration.lock = lock\n registration.mutex = Mutex.new\n registration.thread = Thread.current\n registration.acquired_at = Time.now\n registration.extend_interval = extend_interval\n registration.released = false\n @locks_mutex.synchronize do\n locks << registration\n end\n registration\n end",
"def unlock_all!\n locks.active.each(&:unlock!)\n end",
"def refresh_lock\n if locked?\n @store.transaction do\n @store[:lockdiscovery].last[:timeout] = timeout_node\n end\n\n fetch_lock_info\n else\n refresh_ancestor_locks @ancestor_path, @ancestors.dup\n end\n end",
"def relock_every\n @relock_every ||= 5\n end",
"def unlock_all\n Delayed::Job.transaction do\n Delayed::Job.where(:locked_by => hostname).update_all(:locked_by => nil, :locked_at => nil)\n end\n end",
"def unlock_all\n update_all('$set': { locking_name_field => nil, locked_at_field => nil }).modified_count\n end",
"def lock_expired?; end",
"def lock_list\n super\n end",
"def exclusive_unlock\n\t\t\treturn unless @locked\n\t\t\tif @nested_locks > 0\n\t\t\t\t@nested_locks -= 1\n\t\t\telse\n\t\t\t\tThread.exclusive do\n\t\t\t\t\t@locked = false\n\t\t\t\t\tbegin\n\t\t\t\t\t\tt = @waiting.pop\n\t\t\t\t\t\tt.wakeup if t\n\t\t\t\t\trescue ThreadError\n\t\t\t\t\t\tretry\n\t\t\t\t\tend\n\t\t\t\t\tyield\n\t\t\t\tend\n\t\t\tend\n\t\t\tself\n\t\tend",
"def grow\n @lock.synchronize do\n prune\n logger.debug { \"jobs: #{num_jobs}; busy: #{num_working}; idle: #{num_idle}\" }\n if @size == 0 || (@size < @max_size && num_jobs > 0 && num_jobs > num_idle) then\n space = @max_size-@size\n jobs = num_jobs-num_idle\n needed = space < jobs ? space : jobs\n needed = 1 if needed <= 0\n expand(needed)\n else\n logger.debug \"NOT growing the pool!\"\n end\n end\n\n nil\n end",
"def supported_locks\n []\n end",
"def lock_interval\n get_settings[:lock_interval]\n end",
"def locks\n sql = %q(\n select\n pg_stat_activity.procpid,\n pg_class.relname,\n pg_locks.transactionid,\n pg_locks.granted,\n substr(pg_stat_activity.current_query,1,30) as query_snippet,\n age(now(),pg_stat_activity.query_start) as \"age\"\n from pg_stat_activity,pg_locks left\n outer join pg_class on (pg_locks.relation = pg_class.oid)\n where pg_stat_activity.current_query <> '<insufficient privilege>' and\n pg_locks.pid=pg_stat_activity.procpid and pg_locks.mode = 'ExclusiveLock' order by query_start)\n\n exec_sql(sql, find_uri)\n end",
"def extend()\n # Get the appointment\n appointment = current_user.appointments.last\n student_id = current_user.id\n tutor_id = appointment.tutor.id\n # Get the sidekiq job\n jids = appointment.jids.split('|')\n jid_reminder = jids[0]\n jid_complete = jids[1]\n job_reminder = Sidekiq::ScheduledSet.new.find_job(jid_reminder)\n job_complete = Sidekiq::ScheduledSet.new.find_job(jid_complete)\n complete_new_time = job_complete.at + Settings.call_extend_time\n reminder_new_time = complete_new_time - Settings.call_speak_reminder_time\n\n if job_complete.reschedule(complete_new_time) &&\n job_reminder.reschedule(reminder_new_time)\n # update the appointment cost and call time\n appointment.update_attribute(:amount, appointment.amount + Settings.call_extend_cost)\n appointment.update_attribute(:tutor_earned, appointment.tutor_earned + Settings.call_extend_earned)\n # notify the student and the tutor\n msg = I18n.t('appointment.conference_room.call_extend', \n time: Settings.call_extend_time)\n MessageBroadcastJob.perform_later(msg, 'notification',\n student_id: student_id,\n tutor_id: tutor_id)\n else\n msg = I18n.t('students.errors.appointment.call_extend')\n MessageBroadcastJob.perform_later(msg, 'notification',\n student_id: student_id)\n end\n end",
"def with_app_lock( &block )\n # acquire lock_expiration\n ok = with_connection_lock do |locked_self|\n if locked_self.lock_expiration.nil? then\n row.update_all lock_expiration: Time.now + DELTA\n true\n end\n end\n # use and release lock_expiration outside of the connection_lock\n if ok then\n begin\n block.call\n ensure\n row.update_all lock_expiration: nil\n end\n end\n end",
"def lock_changes\n begin\n @lock_count += 1\n yield\n ensure\n @lock_count -= 1\n end\n end",
"def lock_timeout_retries; end",
"def unlock\n\t\t\treturn unless @locked\n\t\t\tif @nested_locks > 0\n\t\t\t\t@nested_locks -= 1\n\t\t\telse\n\t\t\t\tThread.critical = true\n\t\t\t\t@locked = false\n\t\t\t\tbegin\n\t\t\t\t\tt = @waiting.pop\n\t\t\t\t\tt.wakeup if t\n\t\t\t\trescue ThreadError\n\t\t\t\t\tretry\n\t\t\t\tend\n\t\t\t\tThread.critical = false\n\t\t\t\tbegin\n\t\t\t\t\tt.run if t\n\t\t\t\trescue ThreadError\n\t\t\t\tend\n\t\t\tend\n\t\t\tself\n\t\tend",
"def db_lock(id, duration = 5)\n start = Time.now\n locks = @database.from(:locks)\n while true\n if start < Time.now - duration\n @logger.error(\"Could not acquire a lock for #{id}\")\n # TO-DO: Safeguard to prune old locks\n # This will be necessary if deadlocks are encountered\n # locks.where { created_at < (Time.now - duration) }.delete\n break\n end\n begin\n locks.insert(id: id, created_at: Time.now)\n @database.transaction do\n yield\n end\n break\n rescue Sequel::UniqueConstraintViolation\n sleep(0.1)\n ensure\n locks.where(id: id).delete\n end\n end\n end",
"def lock_duration=(interval)\n @lock_duration = Core::Utils::Interval.try_convert(interval)\n end",
"def lock_interval= value\n put_settings(get_settings.tap {|s| s[:lock_interval] = value})\n end",
"def locks(uri, return_child_locks)\n new_locks = []\n\n locks = data\n\n locks.each do |lock|\n next unless lock.uri == uri ||\n # deep locks on parents\n (lock.depth != 0 && uri.index(\"#{lock.uri}/\") == 0) ||\n # locks on children\n (return_child_locks && lock.uri.index(\"#{uri}/\") == 0)\n new_locks << lock\n end\n\n # Checking if we can remove any of these locks\n new_locks.each_with_index do |lock, k|\n new_locks.delete_at(k) if Time.now.to_i > lock.timeout + lock.created\n end\n new_locks\n end",
"def queue_lock_timeout\n 3600\n end",
"def resource_locks\n :all\n end",
"def with_lock\n self.lock = 1\n yield\n self.lock = 0\n queue_available_jobs\n end",
"def lock_expired?\n if unlock_strategy_enabled?(:time)\n locked_at && locked_at < self.class.unlock_in.ago\n else\n false\n end\n end",
"def refresh_lock(ttl)\n check_exists\n SideJob.redis.expire \"#{redis_key}:lock\", ttl\n end",
"def unlock\n @locking = false\n end",
"def requeue_timed_out_jobs\n # older than x\n jobs.where{started_working_at <= (Time.now - EQ.config.job_timeout)}\\\n .update(started_working_at: nil)\n end",
"def lock_timeout; end",
"def lock_for(duration)\n @locked = true\n @mutex.synchronize { sleep duration }\n @locked = false\n end",
"def lock_registered_jarfiles(*args, &blk)\n jarfiles = registered_jarfiles\n return if jarfiles.empty?\n instances = jarfiles.map do |jarfile, spec|\n if spec\n LockJar::Domain::GemDsl.create spec, jarfile\n else\n LockJar::Domain::JarfileDsl.create jarfile\n end\n end\n combined = instances.reduce do |result, inst|\n LockJar::Domain::DslMerger.new(result, inst).merge\n end\n args = args.reject { |arg| arg.is_a? String }\n lock(combined, *args, &blk)\n end",
"def lock_timeout_retry_delay; end",
"def resend_unlock_instructions\n if_access_locked { send_unlock_instructions }\n end",
"def test_acquire_lock_with_expiry_handler_class_method\n b = Patient.first\n # Get an ordinary lock for the record\n PessimisticLock.destroy_all\n assert PessimisticLock.acquire([b], 'ordinary_holder', 'testing one time lock', :expiry_handler => 'test expiry handler')\n refute b.acquire_one_time_pessimistic_lock('testing one time lock')\n\n # Expire the lock\n expired = 20.minutes.ago\n PessimisticLock.update_all(updated_at:expired)\n\n # read the lock back before deleting expired\n locks = PessimisticLock.all\n assert_equal 1, locks.length\n lock = locks.first\n assert (expired.to_i - lock.updated_at.to_i).abs < 10 # don't care about +- some seconds\n\n PessimisticLock.delete_expired!\n\n assert_equal lock, PessimisticLock.first # is still there and hasn't canged\n end",
"def lock_duration\n to_interval description['LockDuration']\n end",
"def lock_timeout(duration:)\n add option: \"-lock-timeout=#{duration}\"\n end",
"def lock_timeout_limit=(_arg0); end",
"def lock_exclusively!(max_run_time, worker = worker_name)\n now = Time.now\n affected_rows = if locked_by != worker\n # We don't own this job so we will update the locked_by name and the locked_at\n # self.class.update_all([\"locked_at = ?, locked_by = ?\", now, worker], [\"id = ? and (locked_at is null or locked_at < ?)\", id, (now - max_run_time.to_i)])\n klass = self.class\n (klass.all(:id => id) & (klass.all(:locked_at => nil) | klass.all(:locked_at => now))).update(:locked_at => now, :locked_by => worker)\n else\n # We already own this job, this may happen if the job queue crashes.\n # Simply resume and update the locked_at\n # self.class.update_all([\"locked_at = ?\", now], [\"id = ? and locked_by = ?\", id, worker])\n self.class.all(:id => id, :locked_by => worker).update(:locked_at => now)\n end\n if affected_rows == true\n self.locked_at = now\n self.locked_by = worker\n return true\n else\n return false\n end\n end",
"def release_lock\n super\n end",
"def onTimeout(interval, &block)\n \n raise ArgumentError unless interval.kind_of? Numeric\n \n ref = {:interval => interval.to_i, :block => block}\n \n with_mutex do \n if @queue.empty?\n @queue << ref\n else \n @queue.each.with_index do |v, i|\n if v[:interval] >= interval\n v[:interval] -= interval\n @queue.insert(i, ref) \n break\n else\n ref[:interval] -= v[:interval] \n if @queue.last == v\n @queue << ref\n break\n end\n end\n end\n end \n @update.push ref \n end\n \n ref\n end",
"def lock_for(duration)\n @mutex.synchronize { sleep duration }\n end",
"def expire_auth_related_fragments\n expire_download_activity\n expire_create_activity\n expire_resource_list_item_action_partial\n end",
"def waitlist_allocation\n if capacity.changed? or new_record?\n selected_for_allocation = event.waitlist.limit(capacity - candidates.count)\n if selected_for_allocation and candidates.count < capacity\n waitlist_update(selected_for_allocation)\n end\n end\n end",
"def enroll_from_waitlist_as_needed\n\n # Very dumb lock\n @@currently_enrolling = true\n\n # Get tour that some booking was just destroyed for\n tour = self.tour\n\n # We can possibly enroll if there are available seats, and waitlisted seats\n if Booking.get_available_seats_for_tour(tour).positive? && Waitlist.get_waitlisted_seats_for_tour(tour).positive?\n # Iterate over all waitlists for this tour\n Waitlist.get_waitlists_for_tour_first_come_first_served(tour).each do |waitlist|\n\n # Since we are in a loop, re-check some stuff each time\n # Available seats in the tour may have been modified so check that again\n # Waitlist may have been destroyed (but not quite totally 100% gone yet)\n # Honestly I don't understand how a \"destroyed\" waitlist can be returned\n # from get_waitlists_for_tour_first_come_first_served,\n # but I am seeing behavior that makes me pretty sure this is happening\n # Here we have multiple levels of trying to NOT see a waitlist we have destroyed\n if waitlist.num_seats <= Booking.get_available_seats_for_tour(tour)\n\n # We can book all of these seats\n # We need a booking\n # If there was a booking already, update it\n # If there was not a booking already, create it\n # We do not need a waitlist (destroy it)\n associated_booking = waitlist.booking_same_user_same_tour\n if associated_booking\n already_booked = associated_booking.num_seats\n associated_booking.update(\n num_seats: already_booked + waitlist.num_seats\n )\n else\n associated_booking = Booking.new(\n num_seats: waitlist.num_seats,\n user_id: waitlist.user_id,\n tour_id: waitlist.tour_id\n )\n end\n\n # FIRST destroy the waitlist THEN save the new or updated booking\n # to help protect against seats from a waitlist being added to booking more than once\n waitlist.destroy\n associated_booking.save\n\n end\n end\n\n end\n\n # Very dumb lock\n @@currently_enrolling = false\n\n end",
"def lock_timeout_retries=(_arg0); end",
"def commit_and_release_locks\n return System.commit_and_release_locks\n end",
"def unlock; end",
"def work\n stat :attempting_lock_on, item_id: object_id\n if @mutex.try_lock\n stat :has_lock_on, item_id: object_id\n chore\n stat :releasing_lock_on, item_id: object_id\n @mutex.unlock\n else\n stat :bailed_on, item_id: object_id\n end\n end",
"def unlock\n if (!@owner.nil?)\n if (@owner[:locks].last == self)\n @owner[:locks].pop\n else\n if @owner[:locks].delete(self)\n @out_of_order_locks += 1\n raise EMutexOrder.new(self, \"Expected #{@owner[:locks].last}\")\n end\n # if called again let it pass\n end\n @owner = nil\n end\n super\n end",
"def cache_expiry\n if options[:extend_cache_life]\n (1 + options[:extend_cache_life]) * super\n else\n super\n end\n end",
"def with_lock_retries(*args, **kwargs, &block)\n if transaction_open?\n if enable_lock_retries?\n Gitlab::AppLogger.warn 'Lock retries already enabled, executing the block directly'\n yield\n else\n raise <<~EOF\n #{__callee__} can not be run inside an already open transaction\n\n Use migration-level lock retries instead, see https://docs.gitlab.com/ee/development/migration_style_guide.html#retry-mechanism-when-acquiring-database-locks\n EOF\n end\n else\n super(*args, **kwargs.merge(allow_savepoints: false), &block)\n end\n end",
"def unlock\n\t\tif (@lock == 0) \n\t\t\traise \"Cannot unlock unlocked engine\"\n\t\tend\n\t\t@lock -= 1\n\n\t\twhile (@lock == 0) \n\t\t\tactor = @scheduler.next()\n\t\t\tif (actor == nil) \n\t\t\t\treturn self.lock() # /* no actors */\n\t\t\tend\n\t\t\tresult = actor[:act].call\n\t\t\t# if (result != nil && result[:then] != nil) #/* actor returned a \"thenable\", looks like a Promise */\n\t\t\t# \tself.lock()\n\t\t\t# \tresult.then(self.unlock.bind(self))\n\t\t\t# end\n\t\tend\n\n\t\treturn self\n\tend",
"def lock(duration = nil)\n @bridge.lock(duration)\n end",
"def update_remaining_appointments!(time_to_add)\n remaining_appointments_today.each do |appt|\n appt.update_attribute(:appointment_delayed_time,\n appt.appointment_delayed_time + time_to_add.minutes)\n appt.send_delay_email\n appt.push_delay_notification\n end\n end",
"def loner_lock_after_execution_period\n @loner_lock_after_execution_period || 0\n end",
"def lock_exclusively!(max_run_time, worker = worker_name)\n now = self.class.db_time_now\n affected_rows = if locked_by != worker\n # We don't own this job so we will update the locked_by name and the locked_at\n self.class.update_all([\"locked_at = ?, locked_by = ?\", now, worker], [\"id = ? and (locked_at is null or locked_at < ?)\", id, (now - max_run_time.to_i)])\n else\n # We already own this job, this may happen if the job queue crashes.\n # Simply resume and update the locked_at\n self.class.update_all([\"locked_at = ?\", now], [\"id = ? and locked_by = ?\", id, worker])\n end\n if affected_rows == 1\n self.locked_at = now\n self.locked_by = worker\n return true\n else\n return false\n end\n end",
"def locks\n SidekiqUniqueJobs.locks\n end",
"def refresh\n add_queue = nil\n delete_queue = nil\n \n @queues_mutex.synchronize do\n add_queue = Array.new(@add_queue)\n delete_queue = Array.new(@delete_queue)\n @add_queue.clear\n @delete_queue.clear\n end\n \n @monitor.synchronize do\n modified = ! add_queue.empty? || ! delete_queue.empty?\n delete_queue.each { |file| FileUtils.rm_f(file) }\n \n add_queue.each do |package, dest_path|\n package.lock do\n FileUtils.mkdir_p(File.dirname(dest_path))\n FileUtils.mv(package.file, dest_path, :force => true)\n package.update(:file => dest_path)\n end\n end\n yield(modified)\n end\n end",
"def sweeper\n Thread.new(@expiries, @available) do |exp, avail|\n loop do\n sleep 15\n @lock.synchronize {\n avail.each do |instance|\n if exp[instance] < Time.now\n avail.delete(instance)\n exp.delete(instance)\n instance.close\n end\n end\n }\n end\n end\n end",
"def unlock\n self.is_locked = false\n self\n end",
"def update_counters(id, counters)\n counters = counters.merge(locking_column => 1) if locking_enabled?\n super\n end",
"def lock_duration\n Core::Utils::Interval.try_convert(@lock_duration)\n end",
"def refresh(expire_timeout=nil)\n if @lock_expire && owner_ident == (lock_full_ident = @locked_owner_id)\n lock_expire = (Time.now + (expire_timeout.to_f.nonzero? || self.expire_timeout)).to_f\n !!if 1 == eval_safe(@eval_refresh, @ns_names, [lock_full_ident, (lock_expire*1000.0).to_i])\n @lock_expire = lock_expire\n end\n else\n false\n end\n end",
"def with_multilock(keys)\n # We're going to remove nil values from the passed in keys, since we can't lock on nil.\n # Cast the passed-in object to an array so we can handle a Set.\n # Make a copy of the keys array so we don't modify the passed-in object.\n keys = keys.nil? ? [] : keys.to_a.dup.compact\n\n # If no keys have been passed in, just yield and return.\n # This simplifies things for any calling code that wants to pass in a\n # variable number of dependent lock-needing resources when there's a\n # possibility that certain situations may not require any locks at all.\n if keys.blank?\n yield Hash.new\n return\n end\n\n raise ArgumentError, \"Duplicate object id found in given keys: #{keys.join(', ')}\" if keys.uniq.length != keys.length\n lock_objects = {}\n already_locked_ids = []\n\n keys.each do |key|\n lock_objects[key] = lock(key)\n rescue Hyacinth::Exceptions::LockError\n already_locked_ids << key\n end\n\n if already_locked_ids.present?\n # unlock any locks we just established\n lock_objects.each do |_key, lock_object|\n lock_object.unlock\n end\n # and then raise an exception\n raise Hyacinth::Exceptions::LockError, already_locked_ids.length == 1 ?\n \"Lock on #{already_locked_ids.first} is currently held by another process.\" :\n \"Locks on #{already_locked_ids.join(', ')} are currently held by other processes.\"\n end\n\n # TODO: Write a test to ensure that the locks are unlocked if the given block raises an exception.\n begin\n # yield lock_objects so that given block can extend the locks if necessary\n yield lock_objects\n ensure\n # Unlock lock_objects now that we're done with them\n lock_objects.each do |_key, lock_object|\n lock_object.unlock\n end\n end\n end",
"def lock!(wait = 0.1)\n until redis.call(\"SETNX\", key[:_lock], lock_timeout) == 1\n next unless lock = redis.call(\"GET\", key[:_lock])\n sleep(wait) and next unless lock_expired?(lock)\n\n break unless lock = redis.call(\"GETSET\", key[:_lock], lock_timeout)\n break if lock_expired?(lock)\n end\n end",
"def expires_in\n @lock_expire.to_f - Time.now.to_f if @lock_expire && owner_ident == @locked_owner_id\n end",
"def with_exp_backoff(max_retries)\n return unless block_given?\n\n retries = 0\n begin\n yield\n rescue Lokalise::Error::TooManyRequests => e\n raise(e.class, \"Gave up after #{retries} retries\") if retries >= max_retries\n\n sleep 2**retries\n retries += 1\n retry\n end\n end",
"def unlock(*args, &block)\n map_method(:unlock, args, &block)\n end",
"def unlock\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock!\n map.each do |key, matchers|\n matchers.each do |matcher|\n matcher[:locked] = true\n end\n end\n @locked = true\n end",
"def lock_exclusively!(max_run_time, worker)\n now = self.class.db_time_now\n if locked_by != worker\n # We don't own this job so we will update the locked_by name and the locked_at\n self.locked_at = now\n self.locked_by = worker\n end\n\n true\n end",
"def flush_expired\n if gc_last && gc_time && gc_last+gc_time <= Time.now\n flush_expired!\n end\n end",
"def before_perform_workers_lock(*args)\n if lock_workers(*args)\n workers_lock = get_lock_workers(*args)\n if Resque.redis.incr(workers_lock) <= concurrent_workers(*args)\n Resque.redis.expire(workers_lock, worker_lock_timeout(*args))\n elsif\n count = Resque.redis.decr(workers_lock) \n if count.to_i > 0 && Resque.redis.ttl(workers_lock) < 0\n Resque.redis.expire(workers_lock, worker_lock_timeout(*args))\n end\n sleep(requeue_perform_delay)\n Resque.enqueue(self, *args)\n raise Resque::Job::DontPerform\n end\n end\n end",
"def locks\n dataset.from(:pg_class).join(:pg_locks, :relation=>:relfilenode).select{[pg_class[:relname], Sequel::SQL::ColumnAll.new(:pg_locks)]}\n end",
"def lock_exclusively!(max_run_time, worker)\n now = self.class.db_time_now\n \n #whether this job has run before in the past\n first_time=self.first_started_at.nil?\n\n #attributes to modify in the job table\n conditions=\"locked_at = ?, last_started_at = ?\"\n attrs=[now,now]\n\n #if it hasn't been run, then we want to also update first_started_at\n if first_time\n conditions+=\", first_started_at = ?\"\n attrs << now\n end\n\n if locked_by != worker\n # We don't own this job so we will also update the locked_by name\n conditions+=\", locked_by = ?\"\n attrs.unshift(conditions)\n attrs << worker\n affected_rows = self.class.update_all(attrs,\n [\"id = ? and (locked_at is null or locked_at < ?) and (run_at <= ?)\", id, (now - max_run_time.to_i), now])\n else\n # We already own this job, this may happen if the job queue crashes.\n # Simply resume and update the locked_at\n attrs.unshift(conditions)\n affected_rows = self.class.update_all(attrs, [\"id = ? and locked_by = ?\", id, worker])\n end\n\n if affected_rows == 1\n #update the attributes to the same values that were set in the database\n self.locked_at = now\n self.last_started_at = now\n self.first_started_at ||= now\n self.locked_by = worker\n return true\n else\n return false\n end\n end",
"def unlock\n locksmith.unlock # Only signal to release the lock\n end",
"def unlock\n self.locked_at = nil\n self.locked_by = nil\n end",
"def extend(time)\n self.expires_on += time\n self\n end",
"def enable_lock\n add option: \"-lock=true\"\n end",
"def resend_unlock_instructions; end",
"def cleanup\n @keys.each { |key, time|\n if expired_kalive?(key, 300)\n delete_key(key)\n end\n\n if expired_unlocked?(key, 60)\n unblock_key(key)\n end\n }\n \n end",
"def free_lock\n session.execute(@free_lock_stmt, workspace_id, worker_id).each do |row|\n return row['[applied]']\n end\n end",
"def test_lock_refresh\n setup_file\n\n lock1 = lock 'file', :timeout => 1000\n assert_in_delta 1000, lock1.timeout, 50\n\n # uncomment sleep to see that refresh does not\n # even reset to 1000\n #sleep 3\n\n response = @request.lock('file', :refresh => true,\n :if => lock1.token, :timeout => 10000)\n assert_equal '200', response.status\n locks = response.lock_discovery.locks\n assert_equal 1, locks.size\n lock2 = locks[lock1.token]\n assert_in_delta 10000, lock2.timeout, 50\n\n # check that propfind shows the lock is refreshed\n response = @request.propfind 'file', 0, :lockdiscovery\n assert_equal '207', response.status\n assert_equal '200', response[:lockdiscovery].status\n locks = response[:lockdiscovery].lock_discovery.locks\n assert_equal 1, locks.size\n lock3 = locks[lock1.token]\n assert_in_delta 10000, lock3.timeout, 100\n\n unlock 'file', lock1.token\n ensure\n teardown_file\n end",
"def unlock_access!; end",
"def increase_interval\n @interval = if @attempts.zero?\n 1\n elsif @attempts == 1\n 6\n else\n (@interval * @e_factor).to_i\n end\n\n increase_attempts\n end",
"def attemptlock(node, lockstring, duration)\n self.locktype = \"\" if (self.locktype.nil?)\n \n lockset = self.locktype.split(' ') \n # logger.debug(\"data_product.attemptlock, lockset[0]=#{lockset[0]}, expired=#{self.lockexpired?} \")\n if (lockset.empty? || self.lockexpired? || (!lockset[0].eql?(\"read\") && !lockset[0].eql?(\"write\")) )\n self.locktype = \"#{lockstring} #{node}\"\n self.save!\n else\n # if it's a read lock, we can extend the expiration and add the new lock\n if lockset[0].eql?(\"read\") && lockstring.eql?(\"read\")\n logger.debug(\" extending read lock\")\n lockset.push(node)\n self.locktype = lockset.join(' ')\n self.lockexpire = duration.seconds.from_now\n self.save!\n end\n end\n self\n end",
"def test_put_too_many_locktokens_given\n new_coll 'httplock'\n new_file 'httplock/a', StringIO.new(\"hello\")\n new_file 'httplock/b', StringIO.new(\"world\")\n\n b_locktoken = lock('httplock/b', :depth => 0).token\n a_locktoken = lock('httplock/a', :depth => 0).token\n\n response = @request.put('httplock/a', StringIO.new('hello'), :if => [a_locktoken, b_locktoken])\n assert_equal '412', response.status\n \n response = @request.put('httplock/a', StringIO.new('hello'), { :if => [a_locktoken, b_locktoken], :strict_if => false } )\n assert_equal '204', response.status \n\n # cleanup\n unlock('httplock/a', a_locktoken)\n unlock('httplock/b', b_locktoken)\n delete_coll('httplock')\n end",
"def lock_until_reset\n time_remaining = @reset_time - Time.now\n\n raise \"Cannot sleep for negative duration. Clock may be out of sync.\" if time_remaining.negative?\n\n lock_for(time_remaining)\n end",
"def schedule_recurring_jobs\n EmbargoAutoExpiryJob.perform_later(account)\n LeaseAutoExpiryJob.perform_later(account)\n end",
"def extend_document_expiration(guid)\n post \"/api/documents/#{guid}/extend_expiration.xml\", {}\n end",
"def destroy_older_apps(minutes: TTL_MINUTES, force_refresh: @apps.empty?, on_conflict: :refresh_api_and_continue)\n MUTEX_FILE.flock(File::LOCK_EX)\n\n refresh_app_list if force_refresh\n\n while app = @apps.pop\n age = AppAge.new(created_at: app[\"created_at\"], ttl_minutes: minutes)\n if !age.can_delete?\n @apps.push(app)\n break\n else\n begin\n destroy_with_log(\n id: app[\"id\"],\n name: app[\"name\"],\n reason: \"app age (#{age.in_minutes}m) is older than #{minutes}m\"\n )\n rescue AlreadyDeletedError => e\n if handle_conflict(\n strategy: on_conflict,\n conflict_message: e.message,\n ) == :stop\n break\n end\n end\n end\n end\n ensure\n MUTEX_FILE.flock(File::LOCK_UN)\n end",
"def update_busy\n minutes = 0\n self.subprocesses.each do |subprocess|\n minutes = minutes + (subprocess.minutes+subprocess.setup_time)\n end\n self.update(busy:minutes,available:self.minutes-minutes)\n end",
"def supports_advisory_locks?\n false\n end",
"def supports_advisory_locks?\n false\n end"
] | [
"0.67517155",
"0.59968555",
"0.5689523",
"0.559227",
"0.5391564",
"0.52705073",
"0.5240198",
"0.51786256",
"0.5078256",
"0.5058195",
"0.49850562",
"0.49557242",
"0.4947205",
"0.49445063",
"0.4943477",
"0.49239856",
"0.49131888",
"0.4823957",
"0.4818961",
"0.47674716",
"0.4735707",
"0.47206804",
"0.46884394",
"0.468603",
"0.4682514",
"0.4675321",
"0.46697238",
"0.46539244",
"0.46052846",
"0.4583908",
"0.4582164",
"0.45696193",
"0.45655757",
"0.45642292",
"0.4553751",
"0.45491958",
"0.45475933",
"0.4521308",
"0.45129964",
"0.44919637",
"0.4487296",
"0.44856325",
"0.44813752",
"0.44787747",
"0.44757888",
"0.4457068",
"0.4456607",
"0.4442833",
"0.44381544",
"0.44370732",
"0.44353333",
"0.44300088",
"0.4423239",
"0.44200182",
"0.44124356",
"0.4406786",
"0.44021925",
"0.43946943",
"0.43826827",
"0.43825245",
"0.43729258",
"0.4351485",
"0.4336642",
"0.43363696",
"0.43236247",
"0.43179226",
"0.4308461",
"0.43056118",
"0.42989182",
"0.42935494",
"0.42847133",
"0.4283458",
"0.4283238",
"0.4283238",
"0.4283238",
"0.42811614",
"0.42758372",
"0.42740333",
"0.4267303",
"0.42626363",
"0.42602327",
"0.42427692",
"0.4239864",
"0.42381817",
"0.42367",
"0.42340535",
"0.42308414",
"0.4227745",
"0.4225759",
"0.42229858",
"0.42182776",
"0.42140165",
"0.4213211",
"0.4204155",
"0.4202936",
"0.42001352",
"0.41908503",
"0.41818607",
"0.41775295",
"0.41775295"
] | 0.7650049 | 0 |
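
The extend_locks method above copies the registry inside the mutex, iterates over the copy outside it, and then prunes released entries back under the mutex. A small sketch of that copy-then-prune pattern, using a plain array and a hypothetical Entry struct in place of the real registrations:

    # Sketch only: the Entry struct and sample names are assumptions.
    Entry = Struct.new(:name, :released)

    mutex   = Mutex.new
    entries = [Entry.new('job-lock', false), Entry.new('old-lock', true)]

    # Duplicate under the lock so iteration happens on a private copy.
    snapshot = mutex.synchronize { entries.dup }
    snapshot.each { |e| puts "extending #{e.name}" unless e.released }

    # Drop anything marked released while we were iterating.
    mutex.synchronize { entries.delete_if(&:released) }
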
Provide a getter and setter for headers | def header(name, value = nil)
if value
(@headers ||= {})[name] = value
else
(@headers || {})[name]
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def headers\n end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def headers; end",
"def get_headers\n @headers = headers\n @headers\n end",
"def headers\n @headers ||= {}\n end",
"def headers\n @headers ||= {}\n end",
"def headers(value = nil, &block)\n __define__(:headers, value, block)\n end",
"def headers\n @headers ||={}\n end",
"def headers\n @headers\n end",
"def headers(headers); end",
"def header(name, value)\n har_headers << [name, value]\n super(name, value)\n end",
"def headers\r\n # give access to helpers to opts with a different name\r\n @opts\r\n end",
"def headers(request)\n raise NotImplementedError\n end",
"def headers\n @attributes[:headers]\n end",
"def raw_headers; end",
"def headers\n call_once\n @headers\n end",
"def headers\n @headers ||= HeaderHash.new(@http_response.to_hash)\n end",
"def request_headers; end",
"def request_headers; end",
"def getHeaders\n @headers\n end",
"def headers\r\nHttp::Headers.new(@env)\r\nend",
"def headers\n self.class.const_get(:HEADERS) rescue []\n end",
"def read_headers!; end",
"def add_headers; end",
"def headers; return {}; end",
"def headers; return {}; end",
"def headers=(hash); end",
"def headers=(hash); end",
"def headers=(_arg0); end",
"def headers=(_arg0); end",
"def headers=(_arg0); end",
"def headers=(_arg0); end",
"def headers(header = nil)\n if header.is_a?(Hash)\n @headers ||= {}\n @headers.merge!(header)\n end\n\n return @headers if @headers\n\n superclass.respond_to?(:headers) ? superclass.headers : nil\n end",
"def method_missing(m, *args, &block)\n\t\t\treturn super unless header.respond_to?(m)\n\t\t\theader.send(m, *args, &block)\n\t\tend",
"def http_header(field=nil, value=nil)\n @_http_header ||= {}\n @_http_header[field] = value if field\n @_http_header\n end",
"def headers\n cfg_get(:headers)\n end",
"def headers=(headers)\n @headers = headers if headers.kind_of? Hash\n end",
"def method_missing method, *args, &block\n header.send method, *args, &block\n end",
"def method_missing method, *args, &block\n header.send method, *args, &block\n end",
"def headers\n @headers || true if @use_headers\n end",
"def headers\n {}\n end",
"def headers\n {}\n end",
"def headers\n {}\n end",
"def header\n @header ||= create_header\n end",
"def make_headers(user_headers); end",
"def response_headers\n @headers\n end",
"def headers\n # units and source have to go last, so if we push in a new header, these go\n # at end\n @headers+['units','source']\n end",
"def header_set(name, value)\n return dup_without_response.header_set(name, value) if response\n\n name = name.to_s\n if value.nil?\n @headers.delete name\n return self\n end\n\n @headers[name] = value.to_s\n self\n end",
"def headers\n @headers ||= self.class.beautify_headers(@net_http_res.to_hash)\n end",
"def headers(headers)\n @headers = headers\n end",
"def header(name, value)\n if value.nil?\n @headers.delete(name)\n else\n @headers[name] = value\n end\n end",
"def method_missing(method_name, *args)\n triables = [method_name.to_s, method_name.to_s.upcase, \"HTTP_\" + method_name.to_s.upcase]\n triables.map do | possible_key |\n return @headers[possible_key] if @headers.has_key?(possible_key)\n end\n super(method_name, args)\n end",
"def get_header(*params); raise('Stub or mock required.') end",
"def get_headers\n request_object.headers\n end",
"def processed_headers; end",
"def headers(headers = nil)\n @headers.assign(headers) if headers\n @headers\n end",
"def headers\n @headers.tap do |headers|\n headers[:algorithm] = configuration.algorithm if verification?\n case configuration.claim\n when CLAIM_EXPIRATION_TIME\n headers.merge!(exp_headers)\n when CLAIM_NOT_BEFORE_TIME\n headers.merge!(nbf_headers)\n end\n end\n end",
"def headers=(v)\n check_headers(v)\n set_headers(v)\n v\n end",
"def headers=(hash)\n if headers\n headers.replace hash\n else\n super\n end\n end",
"def set_header name, value\n response_object.header name, value\n end",
"def headers(hash=nil)\n @headers = hash unless hash.nil?\n @headers ||= {}\n end",
"def header(_content)\n raise NotImplementedError\n end",
"def headers=(v)\n cfg_set(:headers, v)\n end",
"def header(str)\n # {{{\n if @output_started\n raise \"HTTP-Headers are already send. You can't change them after output has started!\"\n end\n unless @output_allowed\n raise \"You just can set headers inside of a Rweb::out-block\"\n end\n if str.is_a?Array\n str.each do | value |\n self.header(value)\n end\n\n elsif str.split(/\\n/).length > 1\n str.split(/\\n/).each do | value |\n self.header(value)\n end\n\n elsif str.is_a? String\n str.gsub!(/\\r/, \"\")\n\n if (str =~ /^HTTP\\/1\\.[01] [0-9]{3} ?.*$/) == 0\n pattern = /^HTTP\\/1.[01] ([0-9]{3}) ?(.*)$/\n\n result = pattern.match(str)\n self.setstatus(result[0], result[1])\n elsif (str =~ /^status: [0-9]{3} ?.*$/i) == 0\n pattern = /^status: ([0-9]{3}) ?(.*)$/i\n\n result = pattern.match(str)\n self.setstatus(result[0], result[1])\n else\n a = str.split(/: ?/, 2)\n\n @header[a[0].downcase] = a[1]\n end\n end\n # }}}\n end",
"def getHeader() @header1 end",
"def set_header(name, value)\n @headers[name] = value\n \n return self\n end",
"def headers\n @headers ||= begin\n @mail.header.fields.inject({}) do |memo, field|\n name = field.name.downcase.to_s\n next memo if self.class.skipped_headers.include?(name)\n\n header = unquoted_header(name)\n memo.update(name => self.class.unescape(header))\n end\n end\n end",
"def header(value = nil)\n value ? self.header = value : @header\n end",
"def headers\n @headers ||= {\n \"Content-Type\" => \"application/json\",\n \"Authorization\" => \"Bearer #{@token}\",\n }\n end",
"def method_missing(sym, *args)\n if sym.to_s =~ /=$/\n self[sym.to_s[0..-2].to_sym] = args.first\n elsif @headers.has_key?(sym)\n self[sym]\n else\n super\n end\n end",
"def headers hash=nil\n @response.headers.merge! hash if hash\n @response.headers\n end",
"def headers\n if !block_given?\n return ::Vertx::Util::Utils.safe_create(@j_del.java_method(:headers, []).call(),::Vertx::MultiMap)\n end\n raise ArgumentError, \"Invalid arguments when calling headers()\"\n end",
"def headers=(v)\n cfg_set(:headers, v)\n end",
"def headers\n @headers.merge({ \"Status\" => @status })\n end",
"def []=(key, value)\n @headers[key] = value\n end",
"def headers= headers\n @headers = headers\n headers.each do |attr, value|\n `#@native.setRequestHeader(attr, value)`\n end\n end",
"def headers\n @headers ||= @s3.object_headers(@bucket, @key)\n end",
"def header=(header)\n @header = header\n end",
"def header\n return @header\n end",
"def headers\n @headers ||= message.header_fields\n end",
"def headers\n super\n @headers['User-Agent'] = \"Recurly Ruby Client v#{VERSION}\"\n @headers\n end",
"def request_headers=(_arg0); end",
"def []=(k, v) @headers[translate_header_to_sym(k)] = v end",
"def headers\r\n # NB: return value is supposed to be an array of strings\r\n @headers || []\r\n end",
"def headers\n response.headers\n end",
"def header(hash = {})\n @headers.merge!(hash)\n end",
"def freeze\n @headers.freeze\n super\n end",
"def headers\n @headers.to_a\n end",
"def headers\n @headers.to_a\n end",
"def headers\n if !@headers\n @headers = MultiMap.new(@j_del.headers)\n end\n @headers\n end",
"def headers\n if !@headers\n @headers = MultiMap.new(@j_del.headers)\n end\n @headers\n end",
"def headers\n if !@headers\n @headers = MultiMap.new(@j_del.headers)\n end\n @headers\n end"
] | [
"0.7771739",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.77104694",
"0.76148844",
"0.74984527",
"0.74853534",
"0.7454026",
"0.7425922",
"0.7393394",
"0.73925054",
"0.7367843",
"0.73488545",
"0.7307943",
"0.7248234",
"0.71945596",
"0.719194",
"0.71786624",
"0.71362424",
"0.71362424",
"0.71099085",
"0.708022",
"0.707313",
"0.7069237",
"0.70660615",
"0.7065284",
"0.7065284",
"0.70601153",
"0.70601153",
"0.7039581",
"0.7039581",
"0.7039581",
"0.7039581",
"0.70098656",
"0.6996021",
"0.6993361",
"0.6904922",
"0.69017154",
"0.6901425",
"0.6901425",
"0.6884642",
"0.68813753",
"0.68813753",
"0.68813753",
"0.6871176",
"0.6852025",
"0.6849307",
"0.68474674",
"0.68455315",
"0.6827868",
"0.6820383",
"0.6804783",
"0.67863697",
"0.67843467",
"0.6782273",
"0.6768703",
"0.6760472",
"0.6754359",
"0.67443836",
"0.6736606",
"0.67274064",
"0.67237186",
"0.6715651",
"0.6710858",
"0.6697292",
"0.6694811",
"0.6690152",
"0.668822",
"0.66747284",
"0.6658482",
"0.6635351",
"0.6621784",
"0.6605544",
"0.6592029",
"0.658166",
"0.65790695",
"0.65777624",
"0.6570611",
"0.6562038",
"0.65603834",
"0.6549515",
"0.65433073",
"0.65410393",
"0.65309954",
"0.6529896",
"0.6522068",
"0.6519387",
"0.6518815",
"0.6511742",
"0.6511742",
"0.65097934",
"0.65097934",
"0.65097934"
] | 0.6870623 | 52 |
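
The header method above acts as a reader or a writer depending on whether a value is passed. A short usage sketch; the Request class name is an assumption made for illustration:

    class Request
      def header(name, value = nil)
        if value
          (@headers ||= {})[name] = value   # setter form: lazily builds the hash
        else
          (@headers || {})[name]            # getter form: nil-safe lookup
        end
      end
    end

    req = Request.new
    req.header('Content-Type', 'application/json')   # set
    req.header('Content-Type')                        # => "application/json"
    req.header('Accept')                              # => nil (never set)

Note that this idiom cannot store nil or false as a header value, since any falsy value routes the call to the getter branch.
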
Allow convenience setters for the data payload | def method_missing(method, *args)
return unless method =~ /.*=/
@data[method.to_s.gsub(/=$/, '')] = args.first
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def data=(data)\n @data = data\n end",
"def data=(data)\n @data = data\n end",
"def data=(new_data)\n @data = nil\n self.data_json = new_data && ActiveSupport::JSON.encode(new_data)\n end",
"def data= data \n end",
"def data=(data)\n @data = data.to_s\n end",
"def set_private(data)\n res = super(self,data)\n return res\n end",
"def data=(data)\n @raw_data = data\n end",
"def set_attribute_data(data, user)\n return {} unless self.can_write_attributes?(data, user)\n data.each_with_object(Hash.new) do | (key, value), result |\n # First we set all the attributes that are allowed\n\n if self.setting_attribute_is_allowed?(key.to_sym, user)\n result[key] = value\n public_send(\"#{key}=\", value)\n elsif value.present?\n # allow nested params to be specified using Rails _attributes\n name = key.to_s.gsub(/_attributes$/,'').to_sym\n\n next unless self.class.has_exported_nested_attribute?(name, user)\n\n association = self.association(name)\n if value.is_a?(Hash) && [:belongs_to,:has_one].include?(association.reflection.macro)\n target = send(name) || association.build\n result[name] = target.set_attribute_data(value, user)\n elsif value.is_a?(Array) && :has_many == association.reflection.macro\n result[name] = _set_attribute_data_from_collection(association, name, value, user)\n end\n end\n end\n end",
"def initialize data\n @data = data\n end",
"def data=(data)\n data.each_pair do |key,val|\n case\n when val.class == Fixnum # 1\n setLong(key.to_s,val)\n when val.class == Float #1.1\n setDouble(key.to_s,val)\n when val.class == Bignum # 11111111111111111\n setLong(key.to_s,val)\n when (val.class == TrueClass) || (val.class == FalseClass)\n setBoolean(key.to_s,val)\n when val.class == NilClass\n setObject(key.to_s,val)\n else\n setString(key.to_s,val.to_s)\n end\n end\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def initialize data\n self.data = data\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def value=(value); self.data.value = value; end",
"def alter_data(data = {})\n @data = @data.merge(data)\n end",
"def data=(obj, klass = obj.class)\n\t\twrite_local(:klass, klass.to_s)\n\t\twrite_local(:data, obj.to_s)\n\t\t@klass = klass.to_s\n\t\t@data = obj.to_s\n\tend",
"def additional_data=(value)\n @additional_data = value\n end",
"def custom_data\n super.attributes\n end",
"def set_payload\n self.payload = self.create_payload\n end",
"def return_data(data)\n @data_representation = data\n end",
"def data=(data)\n @data = data\n if @data.is_a? Integer\n bind_set_count(data)\n elsif @data.is_a? Time\n bind_set_time(data.iso8601)\n end\n end",
"def immutable!\n @data = data.to_smash(:freeze)\n end",
"def accept_data(key,value)\n @data[key] = value\n end",
"def data=(hash)\n data.mass_assign(hash)\n end",
"def data=(p)\n @data = p.freeze\n @value = @target.dup\n @value << \" #{@data}\" if @data != \"\"\n @value.freeze\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def initialize(data = nil)\n ActiveSupport::JSON.decode(data).each do |k, v|\n self.send(\"#{k}=\", v) if self.respond_to?(\"#{k}=\")\n end if data\n end",
"def method_missing(key, value = nil)\n if respond_to?(key)\n super\n else\n @data[key] = value\n end\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def custom_data=(value)\n @custom_data = value\n end",
"def set_data_document(name, data)\n raise NotImplementedError\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def data=(data)\n case data\n when String\n @data = MetaEvent.str_as_bytes(data)\n else\n @data = data\n end\n end",
"def data_representation=(new_val)\n @data_representation = new_val\n end",
"def data_representation=(new_val)\n @data_representation = new_val\n end",
"def method_missing(method_name, *args)\n return super unless permitted_attributes.include?(method_name)\n begin\n object.send(:\"#{method_name}=\", args.first)\n rescue => e\n if params.has_key?(method_name)\n message = \"Unable to process value for :#{method_name}, no attribute writer. Be sure to override the automatic setters for all params that do not map straight to a model attribute.\"\n Rails.logger.warn({message: message,\n missing_writer: method_name,\n value: args.first,\n error: e})\n self.errors << {status: 422, message: message}\n else\n raise e\n end\n end\n end",
"def initialize(data)\n data.each { |key, value| send(\"#{key}=\", value) unless key.nil? }\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def data=(value)\n if value == @defaults['data']\n @values.delete 'data' if @values.key? 'data'\n else\n @values['data'] = value\n end\n end",
"def initialize(data)\n super\n end",
"def initialize(data)\n super\n end",
"def initialize(data = {})\n # Don't modify incoming element!\n @data = data.dup\n @raw = @data.delete(:raw)\n @msg = @data.delete(:msg)\n\n super(data)\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def data_attributes\n end",
"def data(_value=nil, _repair=false)\n raise NotImplementedError.new\n end",
"def initialize(data)\r\n @data = data.dup\r\n end",
"def marshal_load(data)\n self.data = data\n end",
"def initialize(data) # will accept the attribute data. \r\n\r\n @data = data # store the attribute data, and allows it to be used each time. \r\n\r\n end",
"def set_meta(data)\n \n data[LASERID] = self.identity.chomp\n data[LASER_CURRENT] = self.laser_current.chomp\n data[PIEZO_WAVEFORM] = self.piezo_waveform.chomp\n data[PIEZO_FREQUENCY] = self.piezo_frequency.chomp\n data[PIEZO_AMPLITUDE] = self.piezo_amplitude.chomp\n data[PIEZO_OFFSET] = self.piezo_offset.chomp\n data[PIEZO_VOLTAGE] = self.piezo_voltage.chomp\n data[LASER_TEMP] = self.laser_temperature.chomp\n data[CC_ENABLE] = self.cc_enable.chomp\n\n end",
"def payload(data = nil)\n if data\n self.class.new(id, @payload.merge(data))\n else\n @payload\n end\n end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def data; end",
"def marshal\n @data\n end",
"def data=(d)\n old = read_attribute(:data) || Hash.new\n write_attribute(:data, old.merge(d))\n end",
"def set_payload(data)\n @payload = { \n 'caption' => data['caption']['text'], \n 'photo_url' => data['images']['standard_resolution']['url'],\n 'title' => \"#{data['user']['username']}_#{data['created_time']}\"\n }\n @log.debug(\"Payload is #{@payload}\")\n end",
"def data\n raise NotImplementedError\n end",
"def serializer=(_arg0); end",
"def object_data\n raise NotImplementedError, 'Subclass of EVSSClaimBaseSerializer must implement object_data method'\n end",
"def payload=(payload)\n @payload = payload\n @size, @body = @payload.unpack(PAYLOAD_FORMAT)\n @object = JSON.parse(@body)\n end",
"def payload=(_arg0); end",
"def raw=(_); end",
"def do_extended_data(type, data); end",
"def attr_writer( * )\n fail \"Remember, an Entity is immutable. Use a Services::Service to mutate the underlying data.\"\n end",
"def method_missing(method, *args)\n # Give OpenStruct a chance to create getters and setters for the\n # corresponding field\n super method, *args\n\n if field = setter?(method)\n # override setter logic to apply any existing sanitization rules before\n # assigning the new value to the field\n override_setter_for(field) if sanitize?(field)\n # uses the newly created setter to set the field's value and apply any\n # existing sanitization rules\n send(method, args[0])\n end\n end",
"def []=(key, data)\n\t\tset key, data\n\tend",
"def data=(raw_data)\n new_data = Dis::Model::Data.new(self, raw_data)\n attribute_will_change!(\"data\") unless new_data == dis_data\n @dis_data = new_data\n dis_set :content_hash, if raw_data.nil?\n nil\n else\n Storage.file_digest(new_data.read)\n end\n dis_set :content_length, dis_data.content_length\n end",
"def data=(value)\n # Set metadata on object\n attribute_set(:filename, value[:filename])\n attribute_set(:content_type, value[:content_type])\n attribute_set(:size, value[:size])\n\n # Store contents of file (may be a String or a StringIO)\n attribute_set(:data, if value[:tempfile].respond_to?(:rewind)\n value[:tempfile].rewind\n value[:tempfile].read\n else\n value[:tempfile]\n end)\n end",
"def initialize(data)\n @data = data\n end",
"def method_missing(method, *args, &block)\n data.send(method, *args, &block)\n end",
"def data=(_arg0); end",
"def data=(_arg0); end"
] | [
"0.6786163",
"0.6662369",
"0.6483353",
"0.64550596",
"0.6436892",
"0.63887084",
"0.6382805",
"0.6369151",
"0.6340051",
"0.63362247",
"0.6305498",
"0.6305498",
"0.6305498",
"0.6305498",
"0.6305498",
"0.6305498",
"0.6305498",
"0.6305498",
"0.62513494",
"0.6250158",
"0.62466294",
"0.61344075",
"0.6103169",
"0.6086236",
"0.6072443",
"0.6033083",
"0.60213894",
"0.60044974",
"0.59990335",
"0.59970653",
"0.5991064",
"0.5987381",
"0.5987381",
"0.5987381",
"0.5987381",
"0.5987381",
"0.5987381",
"0.5987381",
"0.5987381",
"0.5951399",
"0.59228414",
"0.590878",
"0.590878",
"0.59005016",
"0.5888159",
"0.58859277",
"0.58859277",
"0.588386",
"0.58791876",
"0.58791876",
"0.58771026",
"0.5865613",
"0.5851869",
"0.5851869",
"0.5851869",
"0.5851869",
"0.58505434",
"0.5843158",
"0.5843158",
"0.58409613",
"0.58404434",
"0.58404434",
"0.58404434",
"0.58400875",
"0.5836711",
"0.5836532",
"0.582788",
"0.58267516",
"0.5824057",
"0.5822964",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.58080906",
"0.5804602",
"0.58013684",
"0.5801359",
"0.5793939",
"0.5787485",
"0.57861525",
"0.5781834",
"0.57749206",
"0.57647437",
"0.5752868",
"0.5742025",
"0.5739519",
"0.57332635",
"0.5730142",
"0.57237405",
"0.5723382",
"0.57207495",
"0.57189614",
"0.57189614"
] | 0.0 | -1 |
Shortcut method to deliver a single message | def deliver!
return true unless Apostle.deliver
unless template_id && template_id != ''
raise DeliveryError,
'No email template_id provided'
end
queue = Apostle::Queue.new
queue.add self
queue.deliver!
# Return true or false depending on successful delivery
if queue.results[:valid].include?(self)
return true
else
raise _exception
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def deliver(message)\n raise NotImplementedError\n end",
"def deliver_now!\n message.deliver!\n end",
"def deliver_now\n message.deliver\n end",
"def deliver(message) \n @channel.send(:add_message, message)\n end",
"def deliver_message(options)\n self.sms_service_provider||= default_service_provider\n\n # set the actual message if it differs; not a before_save to reduce the\n # overhead of checking for all commits\n self.actual_message = substituted_draft_message\n\n result = (self.sms_service_provider).send_sms(self)\n self.unique_id = result[:unique_id] if result.is_a?(Hash)\n \n result\n end",
"def deliver(message)\n @queue.push(message)\n end",
"def deliver_now\n processed_smser.handle_exceptions do\n message.deliver\n end\n end",
"def deliver(message)\n check_api_defined(\"Api::AwsSes\")\n\n options = generate_options(message)\n\n response = Api::AwsSes.new(settings).send_raw_email(options)\n message.message_id = response && response.message_id\n\n message\n end",
"def deliver(*)\n true\n end",
"def deliver_now!\n processed_smser.handle_exceptions do\n message.deliver!\n end\n end",
"def deliver(sms)\n new.deliver!(sms)\n end",
"def deliver(to, message)\n if to.is_a?(Array)\n to.each { |t| @client.send(Message.new(t, message)) }\n else\n @client.send(Message.new(to, message))\n end\n end",
"def send_message(method_name, return_type, *args, &b)\n @messenger.deliver(method_name, return_type, *args, &b)\n end",
"def redelivered; @message_impl.getRedelivered; end",
"def deliver!\n self.do_deliver\n end",
"def deliver(message, options = {})\n options = options.reverse_merge default_options\n to = recipient.respond_to?(:call) ? recipient.call(message) : recipient\n message.body message.body.encode(options[:charset]) if options[:charset].presence\n ActionMailerSMSMailer.forward_sms(to, message, options.compact).deliver_now\n message.to\n end",
"def deliver(message, _options = {})\n raise ArgumentError, \"too many recipients, max. is #{MAX_RECIPIENT}\" if message.to.length > MAX_RECIPIENT\n\n # perform request\n @obh_client.send_sms(post_params(message))\n message.to\n end",
"def deliver!(message)\n send_opts = {}\n send_opts[:raw_message] = {}\n send_opts[:raw_message][:data] = message.to_s\n\n if message.respond_to?(:destinations)\n send_opts[:destinations] = message.destinations\n end\n\n @client.send_raw_email(send_opts)\n\n end",
"def deliver(sms)\n create_sms(sms).submit()\n end",
"def deliver\n response = Curl.post(BASE_URL + 'messages/send.json', email_json)\n response = JSON.parse(response.body)\n @response = response.is_a?(Array) ? response.first : response\n end",
"def deliver(message, *mobiles, **opts)\n attrs = {\n msg: message,\n numbers: numbers(mobiles),\n url_callback: callback\n }.merge(opts)\n rest.get(:sendsms, attrs)['data']\n end",
"def deliver!(message)\n send_opts = {}\n send_opts[:raw_message] = {}\n send_opts[:raw_message][:data] = message.to_s\n\n if message.respond_to?(:destinations)\n send_opts[:destinations] = message.destinations\n end\n\n @client.send_raw_email(send_opts)\n end",
"def deliver\r\n mandrill = Mandrill::API.new(api_key)\r\n mandrill.messages.send(message, async, ip_pool, send_at)\r\n end",
"def deliver\r\n raise NotImplementedError.new(\"#{self.class.name}#deliver is not implemented.\")\r\n end",
"def deliver_now\n deliver!\n end",
"def deliver\n return if sent?\n\n begin\n provider = \"#{service.capitalize}Service\".constantize\n provider.new.send(receiver, message)\n\n update_attributes(sent: true)\n rescue NameError\n false\n end\n end",
"def deliver(params={})\n puts \"**** Message#deliver\" if params[:verbose]\n#puts \"**** Message#deliver response_time_limit=#{self.response_time_limit}\"\n save! if self.new_record?\n deliver_email() if send_email\n deliver_sms(:sms_gateway=>params[:sms_gateway] || default_sms_gateway) if send_sms\n end",
"def send(message)\n message\n end",
"def deliver(ev)\n\t\tbegin\n\t\t\tlookup(ev.id).tell(ev)\n\t\trescue NoMethodError => error\n\t\t\tputs \"Gameman: Error delivering event to object #{ev.id}:\"\n\t\t\tputs error\n\t\tend\n\tend",
"def deliver(should_clean = true)\n clean if should_clean\n self.push_data = self.push_data.gsub('[MESSAGE_ID]', self.id.to_s)\n temp_receipts = recipients.map { |r| build_receipt(r, 'inbox', false) }\n if temp_receipts.all?(&:valid?)\n temp_receipts.each(&:save!) #Save receipts\n Alerter::MessageDispatcher.new(self, recipients).call\n #self.recipients = nil\n end\n\n return temp_receipts if temp_receipts.size > 1\n temp_receipts.first\n end",
"def deliver(message)\n body = message.body\n return true unless body\n\n logger.info({context: context, uid: message.uid, action: \"sending to deliverer\", deliverer: delivery.class.name, byte_size: body.bytesize})\n delivery.deliver(body)\n end",
"def send_to_single(pars)\n @message = Message.new(pars)\n @message.sender = current_user\n if @message.save\n flash[:info] = \"Your message has been sent to #{@message.receiver.full_name}\"\n else\n flash[:error] = \"Your message could not be sent to #{@message.receiver.full_name}\"\n end\n end",
"def quick_deliver(opts={})\n raise \"Must implement quick deliver method!\"\n end",
"def method_missing(*args, &block)\n if block_given?\n @message.send(*args, &block)\n else\n @message.send(*args)\n end\n end",
"def delivered_mail(pos = 1)\n ActionMailer::Base.deliveries.last(pos).first\n end",
"def deliver\n Mailer.deliver_message(options)\n end",
"def deliver(message, options = {})\n raise ArgumentError, \"too many recipients, max. is #{MAX_RECIPIENT} (current: #{message.to.length})\" if message.to.length > MAX_RECIPIENT\n\n # Prepare request\n options = prepare_options options\n http = http_client options\n request = http_request post_params(message, options)\n\n # Log as `curl` request\n self.class.logger.debug \"curl -X#{request.method} '#{endpoint}' -d '#{request.body}'\"\n\n # Perform request\n resp = http.request(request)\n\n # Handle errors\n raise ServiceError, \"endpoint did respond with #{resp.code}\" unless resp.code.to_i == 200\n raise ServiceError, \"unable to deliver message to all recipients (CAUSE: #{resp.body.strip})\" unless resp.body.split(\"\\n\").all? { |l| l =~ /Result_code: 00/ }\n\n # extract Nth-SmsIds\n resp['X-Nth-SmsId'].split(',')\n end",
"def deliver!(message)\n delivery_system = get_value_from(message[\"delivery_system\"])\n\n if delivery_system.nil?\n fail Error::WrongDeliverySystem, \"Delivery system is missing.\"\n end\n\n case delivery_system\n when \"ses\"\n DeliverySystem::AwsSes.new(settings).deliver(message)\n when \"sparkpost\"\n DeliverySystem::SparkPost.new(settings).deliver(message)\n else\n fail Error::WrongDeliverySystem,\n \"The given delivery system is not supported.\"\n end\n end",
"def deliver(message)\n queue_job(utf8_encode_message(message))\n @options.logger.info({ delivery_method: 'Que', action: 'message pushed' })\n end",
"def deliver(options = {})\n Kernel.warn 'Message#deliver is deprecated and will be removed from MoteSMS. Please use #deliver_now'\n deliver_now options\n end",
"def deliver!(message)\n params = {\n raw_message: { data: message.to_s },\n source: message.smtp_envelope_from, # defaults to From header\n destinations: message.smtp_envelope_to # defaults to destinations (To,Cc,Bcc)\n }\n @client.send_raw_email(params).tap do |response|\n message.header[:ses_message_id] = response.message_id\n end\n end",
"def deliver\n #inform_interceptors\n if delivery_handler\n delivery_handler.deliver_sms(self) { do_delivery }\n else\n do_delivery\n end\n inform_observers\n self\n end",
"def deliver\n\t\t\t\t\tself.class.deliver(self.id)\n\t\t\t\tend",
"def deliver(payload, opts, key)\n # noOp\n end",
"def deliver(data)\n @queue.enq data\n end",
"def deliver_now\n processed_mailer.handle_exceptions do\n processed_mailer.run_callbacks(:deliver) do\n message.deliver\n end\n end\n end",
"def deliver\n response = Curl::Easy.http_post(\n BASE_URL + 'messages',\n Curl::PostField.content('to', @to),\n Curl::PostField.content('from', @from),\n Curl::PostField.content('subject', @subject),\n Curl::PostField.content('text', @text)\n )\n @response = JSON.parse(response.body)\n end",
"def deliver(*params)\n type = ((resource_type == \"TaskNotification\") ? \n (resource.resource.nil? ?\n resource_type.underscore : \n resource.resource_type.underscore) : \n resource_type.underscore)\n begin\n Mailer.send \"deliver_#{type}\", target_user, params[0]\n rescue\n Mailer.send \"deliver_#{type}\", target_user\n end\n \n update_attribute(:delivered_at, Time.now)\n end",
"def deliver!(sms = @sms) #:nodoc:\n raise \"no SMS object available for delivery!\" unless sms\n logger.info \"Sending SMS: #{sms} via #{sms.delivery}\" unless logger.nil?\n \n begin\n send(\"perform_delivery_#{sms.delivery}\", sms) if perform_deliveries\n rescue Exception => e \n raise e if raise_delivery_errors\n end\n \n logger.info \"SMS Sent!\" unless logger.nil?\n return sms\n end",
"def deliver\n Outbound.deliver(delivery_options.merge(:conditions => ['sms_service_provider_id = ?', self.provider_id]))\n end",
"def deliver(subscription_id, result)\n payload = { result: result.to_h, more: true }\n @action_cable.server.broadcast(stream_subscription_name(subscription_id), payload)\n end",
"def deliver(message)\n start_thread unless @thread.alive?\n @thread.deliver(message)\n #Race condition? Sometimes we need to rescue this and start a new thread\n rescue NoMethodError\n @thread.kill #Try not to leak threads, should already be dead anyway\n start_thread\n retry\n end",
"def send_message(message); end",
"def send_message(message); end",
"def deliver\n self.class.send(self)\nend",
"def deliver!\n deliver if valid?\n save # Returns true if save is successful, returns errors accessible through message.errors if false\n end",
"def deliver\n response = ''\n begin\n response = Client.post(PATH, params)\n # rescue => e\n # response = \"An error occurred with message: #{e.message}\"\n end\n response\n end",
"def deliver(message)\n item = item_for(message)\n\n client.lpush(\"queue:#{options.queue}\", JSON.generate(item))\n\n @options.logger.info({ delivery_method: 'Sidekiq', action: 'message pushed' })\n true\n end",
"def sendmsg(*args, &block)\n inbox.sendmsg(*args, &block)\n end",
"def deliver_now!\n processed_mailer.handle_exceptions do\n processed_mailer.run_callbacks(:deliver) do\n message.deliver!\n end\n end\n end",
"def send_class_message(method_name, return_type, *args, &b)\n @messenger.deliver_to_class(method_name, return_type, *args, &b)\n end",
"def deliver!(handler = deliver_with)\n \"BlabberMouth::DeliveryHandlers::#{handler.to_s.camelcase}\".constantize.deliver(self)\n end",
"def message_delivered(name)\n fail NotImplementedError\n end",
"def forge\n message.set_response(::Item.forge(command_string.subject, message.sender))\n end",
"def deliver_message_via_carrier(text, to, from)\n SMS_CARRIER.deliver_message(text, to, from)\nend",
"def deliver(event)\n payload = event.payload\n\n info(\n message: 'Mail sent',\n mailer_class: payload[:mailer],\n message_id: payload[:message_id],\n current_user: current_user(payload),\n email_subject: payload[:subject],\n email_to_hint: email_to_hint(payload),\n email_from: payload[:from],\n email_date: payload[:date]\n )\n end",
"def markmsgdelivered(msgid)\n req(\"markmsgdelivered\", \"\", \"\", msgid, \"\")\n end",
"def deliver\n session[:d] = nil\n @letter = Letter.where(:hashed => params[:h]).first\n @message = Message.find(params[:m])\n if @letter && @message\n @letter.message = @message\n if params[:d] && params[:d] == 'yes'\n session[:d] = true\n else\n UserMailer.send_email(@letter).deliver \n end\n @letter.delivered\n session[:letter] = @letter\n if !user_signed_in? \n session[:user_id] = @letter.sender.id\n session[:sender] = @letter.sender \n end\n redirect_to delivered_path\n else\n redirect_to root_path, :alert => \"Something went wrong!\"\n end\n end",
"def send_message(name, label, message = \"\")\n encoded = MP.pack({ :uuid => @uuid, :msg => message })\n @exchange.publish(encoded, :type => label, :routing_key => ns(name))\n end",
"def send_message(msg); end",
"def message\n lambda do |data|\n return if public_conversation?(data.channel)\n response = resp(data)\n send_response(response, data)\n end\n end",
"def send\n response = @message.send\n @message = new_message\n unless response.success?\n raise ServiceError, response.raw\n end\n return response\n end",
"def deliver_message\n\t\ttext = interpolate_text @template.text\n\t\tbegin \n\t\t\tsent_sms = send_sms(\"+#{@user.phone}\", text)\n\t\trescue Exception => e\n\t\t\tlogger.info \"Exception thrown interacting with Twillio: #{e}\"\n\t\t\treturn false\n\t\tend\n\n\t\tif(sent_sms.status != \"failed\")\n\t\t\tmessage = Message.create!(user: @user, template: @template, text: text)\n\t\t\ttrue\t\n\t\telse\n\t\t\tlogger.info \"Message failed to send: #{sent_sms}\"\n\t\t\tfalse\n\t\tend\n\tend",
"def do_message\n group = Group.find(params[:id]) || not_found\n flash.keep\n if params[:message][:recipients].blank?\n flash[:error] = 'You must select at least one recipient.'\n redirect_to(action: 'message', id: group.id) && return\n end\n subject = params[:message][:subject]\n message = params[:message][:message]\n recipients = User.find(params[:message][:recipients])\n email = GroupNotifier.send_group_message(\n group, current_user, recipients, subject, message\n ).deliver_now\n flash.keep\n if email\n flash[:notice] = 'Message sent!'\n redirect_to(action: 'show', id: group.id) && return\n else\n flash[:error] = 'Message delivery failed.'\n redirect_to(action: 'message', id: group.id) && return\n end\n end",
"def deliver(recipient, message, options={})\n\t\tprotocol = options[:insecure] ? \"http\" : \"https\"\n\t\turi = URI.parse \"#{protocol}://#{API_HOST}\"\n\t\turi.path = case (options[:mode].to_sym rescue nil)\n\t\t\t\t when nil, :bit\n\t\t\t\t\t SMS_ENDPOINT\n\t\t\t\t when :bulk\n\t\t\t\t\t BULK_SMS_ENDPOINT\n\t\t\t\t else\n\t\t\t\t\t raise StandardError.new \"Bad delivering mode!\"\n\t\t\t\t end\n\n\t\turi.query = URI.encode_www_form({\n\t\t\tusername: @username,\n\t\t\tpassword: @password,\n\t\t\tdstaddr: recipient,\n\t\t\tsmbody: Iconv.new(\"big5\", \"utf-8\").iconv(message),\n\t\t\tdlvtime: (options[:dlvtime] rescue 0),\n\t\t\tvldtime: (options[:vldtime] rescue nil),\n\t\t\tresponse: (options[:response] rescue nil)\n\t\t})\n\n\t\tresponse = Net::HTTP.start(uri.host, use_ssl: uri.scheme == 'https') do |http|\n\t\t\thttp.verify_mode = OpenSSL::SSL::VERIFY_NONE if options[:ignore_cert]\n\t\t\treq = Net::HTTP::Get.new uri\n\t\t\thttp.request(req)\n\t\tend\n\n\t\tparse_response(response.body)[\"kmsgid\"].to_i\n\tend",
"def produce( message )\n @client.set( @name, message )\n return ::Qup::Message.new( message.object_id, message )\n end",
"def send_msg(message, *args)\n # Fix in ruby osc gem\n args = args.map { |a|\n case a\n when true then 1\n when false then 0\n else\n a\n end\n }\n\n case message\n when Message, Bundle\n osc_client.send(message)\n else\n osc_client.send Message.new(message, *args)\n end\n\n self\n end",
"def deliver(who, body, type = :chat)\n msg = ::Jabber::Message.new(who, body)\n msg.type = type\n @client.send(msg)\n end",
"def work(message, to, headers)\n failures = queue_for_x_death(headers['x-death'])\n\n exchange.publish(message,\n routing_key: \"#{queue_name}_delay_#{failures}\",\n persistent: !Proletariat.test_mode?,\n headers: headers.merge('proletariat-to' => to))\n\n nil\n end",
"def message\n call_once\n @message\n end",
"def deliver(subscription_id, result, context)\n raise NotImplementedError\n end",
"def message( *msgs )\n\t\tself.class.message( *msgs )\n\tend",
"def receive\n # Process msg_descriptor\n Subscriber.execute_from_descriptor(msg_descriptor)\n head :no_content\n rescue InvalidSubscriberError\n # 404: Message delivery will be retried\n head :not_found\n rescue StandardError\n # 422: Message delivery will be retried\n head :unprocessable_entity\n end",
"def method_missing(*args, &block)\n if block_given?\n message.send(*args, &block)\n else\n message.send(*args)\n end\n end",
"def deliver_messages\n ensure_threads_running!\n\n @queue << [:deliver_messages, nil]\n\n nil\n end",
"def message( *msgs )\n\t\t\tself.class.message( *msgs )\n\t\tend",
"def deliver\n newsletter_id = params[:id].to_i\n email_addr = params[:email].blank? ? params[:custom_email] : params[:email]\n\n # send_log_to = Rails.env == 'production' ? (Rails.application.class)::EMAIL_TO_DEFAULT : (Rails.application.class)::EMAIL_TO_DEVELOPER\n\n command = \"RAILS_ENV=#{Rails.env} #{Rails.root}/script/bash_runner.sh newsletter_emailer NEWSLETTER=#{newsletter_id}\"\n command += \" EMAIL_ADDR_OVERRIDE=#{email_addr}\" if email_addr\n command += \" &\"\n\n Rails.logger.warn(\"Sending newsletter with command #{command}\")\n pid = spawn(command)\n\n flash[:message] = \"started job_runner to do delivery of newsletter # #{newsletter_id} w/ pid #{pid}\"\n redirect_to :action => :show, :id => newsletter_id\n end",
"def send_sms_message(text, to, from)\n deliver_message_via_carrier(text, to, from)\nend",
"def message(args)\n if (args.size != 2)\n puts \"usage: message [host] [message]\"\n return\n else\n @connections[args[0]].send(TrashBag.new(args[1]))\n end\n end",
"def deliver(subscription_id, result)\n connection = @connections[subscription_id]\n return if connection.nil?\n connection.transmit(result)\n end",
"def message(message)\n get(\"inbox/messages/#{message}\").pop\n end",
"def do_email\n @delivery = Delivery.find(params[:id])\n @email = params[:email]\n \n Notifier.delivery(@email, @delivery).deliver\n \n flash[:notice] = \"The delivery has been sent successfully.\"\n redirect_to \"/deliveries/#{@delivery.id}\"\n end",
"def send_message (*params)\n send_line Message.new(*params)\n end",
"def method_missing(method, *args, &block)\n message.send(method, *args, &block)\n end",
"def deliver(params)\n return false unless is_configured? && valid_params?(params)\n\n # If :recipient is incoming, we'll use that, otherwise, we'll use\n # what was set in the configure block\n params = { recipient: recipient }.merge!(params)\n\n if response = client.post(:message => params)\n response.code == 200\n end\n end",
"def send(message)\n ## empty\n end",
"def deliver_to_queue\n @deliver_to_queue\n end",
"def deliver\n raise Pomodori::Notifier::Error, \"This method needs to be overwritten\"\n end",
"def send_single_message\n now = Time.now.to_i\n delay = Bot::Conf[:core][:throttle]\n\n if not @send_queue_fast.empty?\n str = @send_queue_fast.pop\n elsif not @send_queue_slow.empty?\n str = @send_queue_slow.pop\n end\n\n if str\n if str.length > 512\n $log.error(\"IRCConnection.send_single_message #{@name}\") { \"Message too large: #{str}\" }\n\n EM.add_timer(delay) do\n send_single_message\n end\n end\n\n send_data str\n\n if @registereed\n @history << now\n @history.shift if @history.length == 5\n\n if @history.length == 5 and @history[0] > now - 2\n delay = 2\n $log.info(\"IRCConnection.send_single_essage #{@name}\") { \"Throttling outgoing messages.\" }\n end\n end\n end\n\n @timer = EM.add_timer(delay) { send_single_message }\n end",
"def send_message(message)\n if @selected\n return @selected.send_message(message, @selected)\n end\n end",
"def message(to, text, opts = {})\n Entity::Message.new(self, to, text, opts).deliver\n end"
] | [
"0.7206859",
"0.71881324",
"0.71686906",
"0.7134063",
"0.68219066",
"0.6802479",
"0.6689876",
"0.66111016",
"0.6609674",
"0.66090864",
"0.6603094",
"0.65422374",
"0.65370464",
"0.65368295",
"0.6530035",
"0.6499281",
"0.6467335",
"0.6455981",
"0.6450886",
"0.6448657",
"0.6442849",
"0.6412517",
"0.6404682",
"0.64043885",
"0.63890266",
"0.63465077",
"0.63409543",
"0.6339419",
"0.6310791",
"0.6261103",
"0.62501216",
"0.6214304",
"0.6180469",
"0.61679924",
"0.6166711",
"0.6148523",
"0.6136268",
"0.6133423",
"0.61323667",
"0.61277306",
"0.6122245",
"0.6117802",
"0.61128575",
"0.6106879",
"0.61067075",
"0.61012936",
"0.6092602",
"0.6082132",
"0.60812193",
"0.60776573",
"0.6076063",
"0.60743165",
"0.6060688",
"0.6060688",
"0.6048661",
"0.604608",
"0.60382557",
"0.60249126",
"0.6018447",
"0.6015534",
"0.60104746",
"0.6010439",
"0.6003147",
"0.5998295",
"0.59920824",
"0.5976178",
"0.59492564",
"0.59434164",
"0.5939295",
"0.5925075",
"0.5913951",
"0.59122974",
"0.5907988",
"0.5892391",
"0.58921725",
"0.589111",
"0.58764905",
"0.5875138",
"0.5869464",
"0.58672565",
"0.5866053",
"0.58619756",
"0.5849833",
"0.5849237",
"0.5817659",
"0.58016574",
"0.57917446",
"0.5782858",
"0.57816845",
"0.5776909",
"0.57698077",
"0.5766336",
"0.576102",
"0.5725184",
"0.5723691",
"0.5722029",
"0.57161117",
"0.5708268",
"0.5707598",
"0.57055354",
"0.5704076"
] | 0.0 | -1 |
=begin input: integer (n) output: none return: integer Rules: 1. The answer has to be correct. 2. The final method must not use recursion. 3. No example asks for the 0th element of the sequence Mental Model: Build a fibonacci sequence up to the nth element and return final element (nth element). 1. We need only keep track of the final two elements of the fib sequence thus far [1, 1] to start with (sequence) 2. We need a counter to determine how far into the sequence we are This can be done by counting down from the integer given in the argument A counter variable, x, will be made 3. A loop is needed to iterate until x == given argument integer 4. With each iteration, the sequence array should be amended such that sequence[1] = sequence[0] + sequence[1] sequence[0] = sequence[1] This can be accomplished by assigning temp variables as follows: last = sequence[0] + sequence[1] first = sequence[1] Then doing the following: sequence = [first, last] Then x must be reduced by 1 Test: fibonacci(10) =end
x = 2
sequence = [1, 1]
until x == integer
last = sequence[0] + sequence[1]
first = sequence[1]
sequence[1] = last
sequence[0] = first
x += 1
end
sequence.last
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fibonacci_sequence(n)\n counter = 1\n fib_sum = 1\n previous = [1]\n if n == 1 || n == 2\n return 1\n else \n until counter == n - 1\n previous << fib_sum\n fib_sum += previous[counter - 1] \n counter += 1\n end\n return fib_sum\n end\nend",
"def fib_sequence1(n)\n return [0] if n == 0\n return [0, 1] if n == 1\n #return [0, 1] if n == 2\n old_seq = fib_sequence1(n-1)\n next_val = fib_sequence1(n-1)[-2] + fib_sequence1(n-1)[-1]\n old_seq + [next_val]\nend",
"def fibs_rec(n, sequence = [0, 1])\n\tif n == 0\n\t\treturn 0\n\telsif n == 1\n\t\treturn 1\n\telse\n\t\tnew_num = sequence[-2] + sequence[-1]\n\t\tsequence << new_num\n\t\tfibs_rec(n-1, sequence)\n\tend\n\tsequence\nend",
"def fibonacci3\n return 0 if n == 0\n return 1 if n == 1 || n == 2 || n == -1\n sequence = [0, 1, 1]\n loop do\n sequence << (sequence[-1] + sequence[-2])\n break if sequence.length > n.abs\n end\n n > 0 ? sequence[-1] : n.even? ? -(sequence[-1]) : sequence[-1]\nend",
"def fib_seq(n)\n return \"#{n} isn't a positive integer number\" if n < 1\n\n fib_array = []\n\n n.times do |number|\n number > 1 ? fib_array.push(fib_array[number - 1] + fib_array[number - 2]) : fib_array.push(number)\n end\n\n fib_array\nend",
"def nthFibonacci (n)\n \n if n == 0\n return 0\n end\n\n i = 0\n\n sequence = Array.new\n\n sequence.push(i)\n\n i += 1\n\n sequence.push(i)\n\n while i < n do\n sequence[i+1] = (sequence[-1]) + (sequence[-2])\n\n i += 1\n end\n\n sequence[-1]\nend",
"def fibonacci(n)\n return [0,1] if n == 2\n return [0] if n <= 1\n sequence = fibonacci(n-1)\n sequence + [sequence[-1] + sequence[-2]]\nend",
"def fibonacci(n)\n \n return 'not valid' if n <= 0\n arr = [0,1,1]\n return arr[n-1] if n <= 3\n\n initial_number = 0\n next_number = 1\n i = 2\n\n\n \n \n while i < n \n sum = initial_number + next_number \n initial_number = next_number\n next_number = sum\n i += 1\n end\n return sum \n\nend",
"def fibonacci(n)\n # raise NotImplementedError\n raise ArgumentError if n < 0 || n == nil #error if negative\n\n i = 0\n fib_array = [0, 1] #all fibonacci sequences begin with 0 and 1\n until fib_array.length == n+1 #until the correct index is reached\n fib_array.push fib_array[i] + fib_array[i+1]\n i += 1\n end\n return fib_array[n] #return number at requested index\nend",
"def fibonacci_iterative(n)\n return 0 if n == 0\n fib_sequence = [0,1]\n index = 1\n until n == index\n fib_sequence << fib_sequence[index] + fib_sequence[index-1]\n index += 1\n end\n fib_sequence.last\nend",
"def fibonacci(n)\n if n == nil || n < 0\n return raise ArgumentError\n end\n if n == 0\n return n\n end\n arr = [1,1]\n y = 1\n x = 1\n until y == n do\n x = arr[0] + arr[1]\n arr[0] = arr[1]\n arr[1] = x\n y += 1\n end\n return arr[0]\nend",
"def fibs (n) #Non-recursive\n\n\t# n = number of Fibonacci sequence members.\n\t# 0, 1, 1, 2, 3, 5, 8, 13, ..\t\n\tfib_seq = []\n\t(0..n).each do |i|\n\t\tif i == 0\n\t\t\tfib_seq << 0\n\t\telsif i == 1\n\t\t\tfib_seq << 1\n\t\telse\n\t\t\tfib_seq << fib_seq[i-2] + fib_seq[i-1]\n\t\tend\n\tend\n\tfib_seq\nend",
"def nth_fibonacci(n)\n return 0 if n == 1\n sequence = [1]\n (n - 2).times do\n current_number, last_number = sequence.last(2)\n sequence << current_number + (last_number || 0)\n end\n\n sequence.last\nend",
"def fib(n)\n \n seq = []\n idx = 0\n loop do \n \n if idx == 0 \n seq.push(0) \n elsif idx == 1\n seq.push(1) \n else\n seq[idx] = seq[idx-1] + seq[idx-2]\n end\n\n break if idx == n\n idx +=1\n end\n\n return seq\n\nend",
"def xbonacci(starting_sequence, number_of_xbonacci_numbers_to_return)\n # create a loop that will run until N is reached\n # loop should take the length of the starting sequence\n # multiple the length by -1\n # add elements from end of array to the length * -1\n\n num_elements_to_sum = starting_sequence.length * - 1\n\n while starting_sequence.length != number_of_xbonacci_numbers_to_return\n starting_sequence << starting_sequence[num_elements_to_sum..-1].reduce(:+)\n end\n\n starting_sequence\n\n\nend",
"def fibonacci(n)\n raise ArgumentError, \"Not an integer larger than 0\" if !n || n < 0\n\n fib_array = [0, 1]\n i = 0\n\n if n == 0\n return 0\n elsif n == 1\n return 1\n else\n until i == (n - 1)\n fib_array << fib_array[i] + fib_array[i + 1]\n i += 1\n end\n end\n return fib_array[n]\nend",
"def generate_fibonacci(n)\n fib_sequence = [0, 1]\n if n == 1 || n == 2\n fib_sequence = fib_sequence.take(n) # Returns first n elements from the array.\n elsif n < 1\n fib_sequence = []\n end\n\n index = 2\n while index < n\n fib_sequence.push(fib_sequence[index-1] + fib_sequence[index-2])\n index += 1\n end\n fib_sequence\nend",
"def fibonacci_basic(n)\n arr = [1, 1]\n return 1 if n == 1\n until arr.size == n\n arr << arr[-1] + arr[-2]\n end\n arr.last\nend",
"def fibonacci_sequence index\n if index == 0 || index == 1\n return index\n else\n return fibonacci_sequence(index - 2) + fibonacci_sequence(index - 1)\n end\nend",
"def fibonacci(n)\n if n == 0\n 0\n elsif n == 1\n 1\n else\n before_previous = 0\n previous = 1\n actual = nil\n (2..n).each do\n actual = previous + before_previous\n before_previous = previous\n previous = actual\n end\n actual\n end\nend",
"def fibonacci(n)\n seq = [0,1]\n i = 2\n while i <= n\n seq << (seq[i-1] + seq[i-2])\n i+=1\n end\n return seq[n]\nend",
"def fibonacci_number(num)\n # make sure fibonacci_number(0) and fibonacci_number(1)\n # start the sequence by returning 1 and 1. \n # This is an edge case\n\n if num <= 2\n return 1\n end\n # start the sequence with the first two numbers\n fibs_ar = [1, 1]\n # for educational purposes\n counter = 1\n\n # calculate the next number until the array has the one we want\n while fibs_ar.length < num\n second_to_last = fibs_ar[fibs_ar.length - 2]\n last = fibs_ar.last\n next_fib = second_to_last + last\n fibs_ar << next_fib\n counter +=1\n puts \"second_to_last = #{second_to_last}\"\n puts \"last = #{last}\"\n puts \"next_fib = #{next_fib}\"\n puts \"counter = #{counter}\"\n puts \"The Fibonacci Series is = #{fibs_ar}\"\n puts \"After #{counter} iterations, the last number is #{last}\"\n puts \"===========================================\"\n end\n print \"The final Fibonacci series is: #{fibs_ar}\"\n print \"\\n\"\n print \"The number in the Fibonacci series at position #{num} is: #{fibs_ar.last}\"\n print \"\\n\"\n fibs_ar.last\nend",
"def fibonacci\n @sequence = [1,1]\n x = 0\n while (@sequence[x+1] < 4e+6)\n @sequence << @sequence[x] + @sequence[x+1]\n x += 1\n end\n even? x\nend",
"def fibonacci(length)\n if length == 0\n return []\n elsif length == 1\n return [1]\n end\n sequence = [1,1]\n\n (length - 2).times do\n sequence << sequence[-1] + sequence[-2]\n end\n\n return sequence\nend",
"def fibs_rec(n, result = [0, 1])\n if n > 1\n result << result[-2] + result[-1]\n fibs_rec(n-1, result)\n end\n result\nend",
"def fib(n)\n\n fib_seq = [0, 1]\n\n while n > 2\n fib_seq << fib_seq[fib_seq.length - 1] + fib_seq[fib_seq.length - 2]\n n -= 1\n end\n\n return fib_seq\n\nend",
"def fibonacci_it(n)\n arr = [1, 1]\n return [] if n <= 0\n return [1] if n == 1\n until arr.length == n\n arr << arr[-1] + arr[-2]\n end\n arr\nend",
"def fibonacci(n)\n return [0,1].take(n) if n <= 2\n fib_seq = fibonacci(n-1)\n last_ele = fib_seq[-1] + fib_seq[-2]\n fib_seq << last_ele\nend",
"def fibonacii_rec(n)\n return [0] if n == 1\n return [0, 1] if n == 2\n fibonacii_rec(n - 1) + [fibonacii_rec(n - 1)[-1] + fibonacii_rec(n - 1)[-2]]\nend",
"def iterative_fib_sequence(n)\n\tfib = [0]\n\tn.times do\n\t\tif fib.length > 2\n\t\t\tfib << fib[-1] + fib[-2]\n\t\telse\n\t\t\tfib << 1\n\t\tend\n\tend\n\tfib\nend",
"def fibonaci(n)\n\tfi= [1, 1]\n\t(n-1).times do\n\t\tfi << fi[1]+fi.shift\n\tend\n\treturn fi[0]\nend",
"def fibs_rec(n)\n if n == 1\n [0]\n elsif n == 2\n [0,1]\n else\n fibs_rec(n-1) << fibs_rec(n-1)[-1] + fibs_rec(n-1)[-2]\n end\nend",
"def fib(integer)\n\nfib_arr = [0,1]\n#declaring the beginning portion of the fibanocci sequence\nindex = 2\n#starting at two because the array is already filled with the 0 & 1 index's filled \n\tuntil index == integer \n# the integer will be the fib(x) for the sequence\n\tnext_number = fib_arr[-1] + fib_arr[-2]\n#the next number in the sequence will equal the last number in the array, represented by -1, and the second to\n#last number represented by -2 added together.\n\tfib_arr << next_number\n#feeding the next_number into the array\n\tindex += 1\n#changing to the next index in the array\n\tend\n\tputs fib_arr\n#printing the new array\nend",
"def fib(number)\n sequence = [0,1]\n number.times {sequence << sequence[-1] + sequence[-2]}\n sequence[-1]\n return sequence\nend",
"def fibonacci_i(n)\n return [] if n == 0\n return [1] if n == 1\n answer = [1, 1]\n while answer.length < n\n answer << answer.last + answer[-2]\n end\n answer\nend",
"def fibonacci (n)\r\n seq = [0, 1]\r\n \r\n (2..n).each do |i|\r\n seq.push seq[i-1] + seq[i-2]\r\n end\r\n \r\n return seq\r\nend",
"def fibonacci(limit)\n sequence = [0]\n if limit == 0\n puts \"Please enter a number above 0\"\n else\n pattern = 1\n (limit - 1).times do |index|\n sequence << pattern\n pattern = pattern + sequence[index]\n end\n puts \"The first #{limit.to_s} numbers in the Fibonacci Sequence are #{sequence}\"\n end\nend",
"def fib_rec(sequence_length)\n if sequence_length == 0\n return [0]\n elsif sequence_length == 1\n return [0,1]\n end\n\n return fib_rec(sequence_length - 1) << fib_rec(sequence_length - 1)[-1] + fib_rec(sequence_length - 2)[-1]\n\n\nend",
"def fibonacci(length)\n \tif length == 0\n return []\n elsif length == 1\n return [1]\n end\n\n sequence = [1, 1]\n while sequence.length < length\n sequence << sequence[-1] + sequence[-2]\n # last second to the last\n end\n return sequence\nend",
"def fibonacci(n)\n if n<=2\n [0,1].take(n)\n else\n arr = fibonacci(n-1)\n arr << arr[-2]+arr[-1]\n end\nend",
"def fibonacci(n)\n raise ArgumentError.new if n.nil? || n < 0\n return n if n == 0\n\n array = [0, 1]\n i = 2\n while i <= n\n array[i] = array[i - 1] + array[i - 2]\n i += 1\n end\n\n return array.last\nend",
"def fibonacci_iterative(n)\n arr = [1,1]\n return arr.take(n) if n <= 2\n (3..n).each do |el|\n arr << arr[-1] + arr[-2]\n end\n arr[-1]\nend",
"def fibs_rec n\n (n < 3) ? [1]*n : fibs_rec(n - 1) << fibs_rec(n - 1)[-1] + fibs_rec(n - 1)[-2]\nend",
"def fibs_rec(n)\n case n\n when 0\n return []\n when 1\n return [0]\n when 2\n return [0,1]\n else\n array = fibs_rec(n-1)\n # offset to n-3 and n-2 to acccount for 0 position start\n array.push(array[n-3] + array[n-2])\n return array\n end\nend",
"def fibonacci(n)\n fibonacci_num = 0\n prior_num = 0\n current_num = 1\n\n if !n || n < 0\n raise ArgumentError\n elsif n == 0 || n == 1\n return n\n else \n (n-1).times do\n fibonacci_num = prior_num + current_num\n prior_num = current_num\n current_num = fibonacci_num\n end\n return current_num\n end\nend",
"def fibonacci(length)\n if length == 0\n return []\n elsif length == 1\n return [1]\n end\n \n seq = [1, 1]\n \n while seq.length < length\n last = seq[-1]\n second_to_last = seq[-2]\n print seq\n puts\n next_ele = last + second_to_last\n puts next_ele\n puts \"-----\"\n seq << next_ele\n end\n \n return seq\n end",
"def fibonacci(int)\n range_array = (1..int).to_a\n final_array = [1,1]\n range_array.each do |num|\n p num > 2 ? final_array << final_array[num-3] + final_array[num-2] : nil\n end\n final_array[-1]\nend",
"def fibonacci(n)\n raise ArgumentError, \"n must be an integer greater than 0\" unless n && n >= 0\n current = 0\n one_before = 0\n two_before = 0\n i = 0\n while i <= n\n if i == 1\n current = 1\n else\n two_before = one_before\n one_before = current\n current = one_before + two_before\n end\n i += 1\n end\n return current\nend",
"def fibonacci_rec(n)\n return [1] if n == 1\n return [1, 1] if n == 2\n return [1, 1,2] if n == 3 \n fibonacci_rec(n-2) + [fibonacci_rec(n-1)[-1]] + [fibonacci_rec(n-1)[-2..-1].sum]\nend",
"def fibonacci(n)\n result = []\n i = 1\n while i <= n\n if i == 1 || i == 2\n result << 1\n else\n result << result[i - 3] + result[i - 2]\n end\n i += 1\n end\n result.last\nend",
"def fibs(n)\n # fibs = [0, 1]\n # return [0] if n == 1\n # return fibs if n == 2\n\n # (n - 2).times do \n # fibs << fibs[-1] + fibs[-2]\n # end\n # return fibs\n\n return 0 if n == 1\n return 1 if n == 2\n\n return fibs(n - 1) + fibs(n - 2)\nend",
"def fib(n)\n\n fib_array = []\n first = 0\n second = 1\n\n if n == 0\n fib_array << nil\n elsif n == 1\n fib_array << 0\n else\n fib_array << 0\n fib_array << 1\n if n >= 3\n (3..n).each do # Due to the zero index of\n # Ruby, we use 3 here to represent the\n # numbers after 0, 1, 1\n\n next_number = (first + second)\n first = second\n fib_array << second = next_number\n\n end\n end\n end\n return fib_array\n\nend",
"def nthFibonacci (n)\r\n num = n.to_i\r\n fibonacci_sequence = Array.new\r\n case num\r\n when 0 \r\n fibonacci_sequence << 0\r\n when 1\r\n fibonacci_sequence << [0,1]\r\n else\r\n fibonacci_sequence[0] = 0\r\n fibonacci_sequence[1] = 1\r\n i = 1\r\n while i < num\r\n i+= 1\r\n fibonacci_sequence[i] = fibonacci_sequence[i-1] + fibonacci_sequence[i-2] \r\n end\r\n end\r\n return fibonacci_sequence\r\nend",
"def fibonacci(length)\n sequence = []\n prev1 = 1\n prev2 = 0\n while sequence.length < length\n if sequence.length == 1\n sequence << prev1\n else\n sequence << prev1 + prev2\n prev2 = prev1\n prev1 = sequence[-1]\n end\n end\n return sequence\nend",
"def fibs_rec(n)\n\tarr = [0,1]\n\tif n == 1\n\t\treturn arr[0]\n\telsif n == 2\n\t\treturn arr\n\telse\n\t\tarr = fibs_rec(n-1)\n\t\tarr << arr[-1] + arr[-2]\n\tend\n\nend",
"def build_fib_seq(n)\n fib = [1, 1]\n count = 2\n until count >= n\n fib << fib[-1] + fib[-2]\n count += 1\n end\n\n fib\nend",
"def fibs(n)\r\n\r\n fibs = [0, 1]\r\n until n == fibs.length\r\n fibs << fibs[-1] + fibs[-2]\r\n end\r\n fibs\r\n\r\nend",
"def fibs(n)\r\n\r\n fibs = [0, 1]\r\n until n == fibs.length\r\n fibs << fibs[-1] + fibs[-2]\r\n end\r\n fibs\r\n\r\nend",
"def fibonacci(n)\n return 1 if n == 1 || n == 2\n\n fibs = [1, 1]\n\n while fibs.length < n\n fibs << fibs[-1] + fibs[-2]\n end\n\n fibs.last\nend",
"def fib(n) #n indicates # of elements we want in array\r\n a = 0\r\n b = 1\r\n fib_arr = []\r\n #loop starts iff n >= 1, but x starts from 0 (when n=1, x=0; n=2, x=0,1)\r\n n.times do |x| \r\n if x == 0\r\n fib_arr << a\r\n elsif x == 1\r\n fib_arr << b\r\n else\r\n c = a+b #c is the new fib # we are generating\r\n fib_arr << c\r\n a = b\r\n b = c\r\n end\r\n end\r\n return fib_arr\r\nend",
"def fibonacci (n)\r\n def calculation(n)\r\n a = 0\r\n b = 1\r\n\r\n # Compute Fibonacci number in the desired position.\r\n n.times do\r\n temp = a\r\n a = b\r\n # Add up previous two numbers in sequence.\r\n b = temp + b\r\n end\r\n\r\n return a\r\n\r\n end \r\n\r\n fib_results = []\r\n\r\n n.times do |n|\r\n\r\n result = calculation(n)\r\n fib_results << result\r\n end \r\n\r\n return fib_results\r\n\r\nend",
"def fibs_rec(n, result = [0, 1])\n\tif n > 1\n\t\tresult << result[-2] + result[-1]\n\t\tfibs_rec(n - 1, result)\n\tend\n\tresult\nend",
"def fibonacci(n)\n return [0] if n==1\n return [0,1] if n==2\n \n previous = fibonacci(n-1)\n previous << previous[-2] + previous.last\n previous\nend",
"def fibs_rec(n)\n if n <= 2 then return n == 2 ? [0,1] : [0] end\n a = fibs_rec(n-1)\n return a << a[-1] + a[-2]\n\nend",
"def fibonacci(int)\n range_array = (1..int).to_a\n final_array = [1,1]\n range_array.each do |num|\n num > 2 ? final_array << final_array[num-3] + final_array[num-2] : nil\n end\n final_array[-1]\nend",
"def fibs_rec(n, fib_array=[0,1])\n return [0] if n == 1\n return fib_array if fib_array.length == n\n fib_array << fib_array[-1] + fib_array[-2]\n fibs_rec(n, fib_array)\nend",
"def fib_rec(n)\n return nil if n < 1\n return [1] if n == 1\n return [1, 1] if n == 2\n current = [1, 1]\n recursive = fib_rec(n - 1)\n (recursive.length - 1).times do |el|\n current << recursive[el] + recursive[el + 1]\n end\n current\nend",
"def fibonacci(n)\n raise ArgumentError if n == nil || n < 0 || n.class != Integer\n return n if n < 2\n\n if n >= 2\n current_value= 0\n previous_value = 1\n next_value = 0\n number = n - 1\n\n number.times do \n next_value = current_value + previous_value \n current_value = previous_value \n previous_value = next_value\n end\n end\n \n return next_value\nend",
"def fibonacci(length)\n if length == 0\n return []\n elsif length == 1\n return [1]\n end\n\n seq = [1, 1]\n while seq.length < length\n last = seq[-1]\n second_to_last = seq[-2]\n print seq\n puts\n next_ele = last + second_to_last\n puts next_ele\n puts \"=====\"\n seq << next_ele\n end\n\n return seq\nend",
"def fibonacci(n)\n # create an array with n elements \n # result_table will contain the final results \n result_table = Array.new(n, 0)\n \n # initial elements of the sequence \n result_table[0] = 0\n result_table[1] = 1\n \n # Fist element already calculated\n # we start with the 3rd \n tab_index = 2\n (n-2).times do\n # the array element is equal to the sum of 2 previous elements\n result_table[tab_index] = result_table[tab_index-1] + result_table[tab_index-2] \n tab_index += 1\n end\n \n return result_table\nend",
"def fibs(n)\n result = []\n penultimate = 0\n last = 1\n ## skips iteration if n = 0\n 1.upto(n) do |num|\n if num == 1\n result << penultimate\n elsif num == 2\n result << last\n else\n next_num = penultimate + last\n penultimate = last\n last = next_num\n result << next_num\n end\n end\n return result\nend",
"def fib1(n)\n fibs = [0, 1]\n ( 0..(n-1) ).each { |idx|\n next if fibs[idx] < 1\n fibs << fibs[idx] + fibs[idx - 1]\n }\n fibs.last\nend",
"def fibonacci(n)\r\n if n == 0\r\n return 0\r\n elsif n == 1\r\n return 1\r\n else\r\n return fibonacci(n-1) + fibonacci(n-2)\r\n end\r\nend",
"def fibonacci(how_many_numbers)\n fibonacci_sequence = []\n count = 0\n while count < how_many_numbers\n if count == 0\n fibonacci_sequence << 0\n count += 1\n elsif count == 1\n fibonacci_sequence << 1\n count += 1\n else\n fibonacci_sequence << fibonacci_sequence[-1] + fibonacci_sequence[-2]\n count += 1\n end\n end\n fibonacci_sequence\nend",
"def fibonacci(n)\n if n <= 2\n 1\n else\n previous, sum = [1, 1]\n 3.upto(n) do\n previous, sum = [sum, previous + sum]\n end\n sum\n end\nend",
"def fibonacci(length)\n if length == 0\n return [] \n elsif length == 1\n return [1]\n end\n array = [1, 1]\n while array.length < length\n array << array[-1] + array[-2]\n end\n array\nend",
"def fibs_rec(n)\n n == 0 || n == 1 ? n : fibs_rec(n-1) + fibs_rec(n-2)\n fibs(n)\nend",
"def fib(n)\n if n == 0\n return [0]\n elsif n == 1\n return [0, 1]\n else\n fib(n-1) << fib(n-1)[-2] + fib(n-1)[-1]\n end\nend",
"def fibonacci(n)\n ary = [0, 1]\n (n - 1).times { |_| ary << ary[-2] + ary[-1] }\n ary.pop\nend",
"def fibonacci number\n sequence = []\n a = 1\n b = 1\n while true\n sequence << b\n newa = b\n b = a+b\n a = newa\n break if b > number\n end\n sequence\nend",
"def fibonacci(n)\n fibs = [1,1]\n return [1, 1].take(n) if n <= 2\n\n while fibs.length < n\n fibs << (fibs[-1] + fibs[-2])\n end\n fibs\nend",
"def fibonacci(sequence_up_to)\n\tfibonacci_sequence = [0, 1]\n\n\t(0...(sequence_up_to - 2)).each {\n\t\tfibonacci_sequence << fibonacci_sequence.last(2).reduce(0, :+)\n\t}\n\n\tfibonacci_sequence\nend",
"def fib_it(n)\n fib_array = [1, 1]\n return fib_array if n == 2\n return [1] if n == 1\n\n (3..n).count { fib_array << fib_array[-2] + fib_array[-1] }\n\n fib_array\nend",
"def fibonacci2(n)\n return 0 if n == 0\n return 1 if n == 1 || n == 2\n last = 0\n lastlast = 1\n current = 0\n n.times do\n current = last + lastlast\n lastlast = last\n last = current\n end\n current\nend",
"def fibonacci( n )\n return n if ( 0..1 ).include? n\n ( fibonacci( n - 1 ) + fibonacci( n - 2 ) )\nend",
"def fibonacci( n )\n return n if ( 0..1 ).include? n\n ( fibonacci( n - 1 ) + fibonacci( n - 2 ) )\nend",
"def fibonacci( n )\n return n if ( 0..1 ).include? n\n ( fibonacci( n - 1 ) + fibonacci( n - 2 ) )\nend",
"def fibonacci(n)\n if n == 0\n 1\n elsif n == 1\n 1\n else\n fibonacci(n-2) + fibonacci(n-1)\n end\nend",
"def fib(n)\n # edge cases:\n if n < 0\n raise Exception, 'Index was negative. No such thing as a negative index in a series.'\n elsif n == 0 || n == 1\n return n\n end\n\n # we'll be building the fibonacci series from the bottom up\n # so we'll need to track the previous 2 numbers at each step\n prev_prev = 0\n prev = 1\n current = prev + prev_prev\n\n # since we already initialized up to the 2nd number in the series\n # we take n - 2 steps ahead to reach n (.times is exclusive)\n (n - 1).times do\n current = prev + prev_prev\n prev_prev = prev\n prev = current\n end\n\n current\nend",
"def fibonacci(length)\n if length == 0\n return []\n elsif length == 1\n return [1]\n end\n \n seq = [1,1]\n while seq.length < length\n last = seq[-1]\n sec_last = seq[-2]\n seq << last + sec_last\n end\n return seq\nend",
"def fibonacci(n)\n if n == nil || n < 0\n raise ArgumentError\n elsif n == 0\n return 0\n elsif n == 1\n return 1\n end\n\n first = 0\n second = 1\n\n index = 1\n while index < n\n holder = first + second\n first = second\n second = holder\n index += 1\n end\n return holder\nend",
"def iterative_fibs(n)\n return [] if n == 0\n return [0] if n == 1\n fibs = [0, 1]\n until fibs.length == n\n fibs << fibs[-1]+ fibs[-2]\n end\n fibs\nend",
"def fibonacci(number_output)\n fibonacci=[1] #default with only one number\n if number_output > 1\n fibonacci << 1 #second digit in fibonnacci sequence\n if number_output > 2\n #already has two numbers in array\n (number_output-2).times do |count|\n fibonacci << fibonacci[-1] + fibonacci[-2]\n end\n end\n end\n print fibonacci\nend",
"def fibonacci(n)\r\n return [] if n < 1\r\n return [1] if n == 1\r\n vet = [0, 1]\r\n (n - 2).times{vet << vet[vet.size-1] + vet[vet.size-2]}\r\n return vet\r\nend",
"def fibonacci(n)\n if n == 0 || n == 1\n return n\n end\n\n first = 0\n second = 1\n current = 1\n\n while n > 2\n first = second\n second = current\n current = first + second\n n -= 1\n end\n\n return current\nend",
"def fibonacci(length)\n\tif length == 0\n \treturn []\n end\n \n \tif length == 1\n return [1]\n end\n \n \tseq = [1, 1]\n \n \t(2...length).each do |i|\n \tseq.push(seq[-1] + seq[-2])\n end\n \t\n \treturn seq\nend",
"def fibs_rec(n, arr = [])\n arr.unshift(n - 1 <= 1 ? n - 1 : fibo(n - 2) + fibo(n - 3))\n return n - 1 > 0 ? fibs_rec(n - 1, arr) : arr\nend",
"def xbonacci(starting_sequence, number_of_xbonacci_numbers_to_return)\n\nend",
"def fibs_rec(n)\n return n if n <= 1\n return fibs_rec(n-2) + fibs_rec(n-1)\nend",
"def xbonacci(starting_sequence, number_of_xbonacci_numbers_to_return)\n result = starting_sequence\n nums_to_sum = starting_sequence.length\n until result.length == number_of_xbonacci_numbers_to_return\n last_x_nums = result[-nums_to_sum..-1]\n result << last_x_nums.reduce(:+)\n end\n result\nend"
] | [
"0.8097728",
"0.8002613",
"0.79860437",
"0.7869245",
"0.7812189",
"0.7802796",
"0.779705",
"0.7764601",
"0.7747439",
"0.77333474",
"0.7711602",
"0.7694139",
"0.76787966",
"0.7674505",
"0.7668799",
"0.7661284",
"0.7656368",
"0.7625008",
"0.7610017",
"0.7609069",
"0.7607894",
"0.76050705",
"0.7604836",
"0.76037574",
"0.75931185",
"0.75927633",
"0.758798",
"0.7582813",
"0.7579528",
"0.75686246",
"0.75668275",
"0.7565117",
"0.7563776",
"0.75526863",
"0.75445276",
"0.75424224",
"0.7526092",
"0.7521327",
"0.75084776",
"0.7506055",
"0.7496477",
"0.74945176",
"0.74940807",
"0.7492162",
"0.74898046",
"0.74729484",
"0.7472852",
"0.7464875",
"0.74481094",
"0.74460423",
"0.7443713",
"0.7441593",
"0.74398077",
"0.7435128",
"0.743321",
"0.7432395",
"0.74250215",
"0.74250215",
"0.74226695",
"0.74135274",
"0.7412398",
"0.74105674",
"0.740875",
"0.74049556",
"0.7402994",
"0.7399175",
"0.73955506",
"0.7386289",
"0.7383174",
"0.7380652",
"0.7379749",
"0.7379217",
"0.73759454",
"0.73748255",
"0.7373491",
"0.7364304",
"0.73640454",
"0.73631597",
"0.73590523",
"0.73579293",
"0.7357371",
"0.7356207",
"0.7354648",
"0.7347482",
"0.7345641",
"0.7345641",
"0.7345641",
"0.7344708",
"0.734221",
"0.7334111",
"0.73332995",
"0.732156",
"0.73206115",
"0.73181397",
"0.7316414",
"0.73160994",
"0.7309432",
"0.7308574",
"0.73050404",
"0.7302267"
] | 0.7847279 | 4 |
GET /selecaos/1 GET /selecaos/1.json | def show
@selecao = Selecao.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @selecao }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n @seguro = Seguro.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @seguro }\n end\n end",
"def index\n @socios = Socio.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @socios }\n end\n end",
"def show\n @soiree = Soiree.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @soiree }\n end\n end",
"def show\n @sezione = Sezione.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @sezione }\n end\n end",
"def show\n @escola = Escola.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @escola }\n end\n end",
"def index\n\n if params[:ventas_seguimiento]\n cliente_id = params[:ventas_seguimiento][:cliente_id]\n @ventas_seguimientos = Ventas::Seguimiento.where(\"cliente_id = ?\",cliente_id).order(\"created_at DESC\").paginate(:page => params[:page], :per_page => 5)\n @seguimientos = Ventas::Seguimiento.new(:cliente_id => cliente_id)\n else\n @ventas_seguimientos = Ventas::Seguimiento.order(\"created_at DESC\").paginate(:page => params[:page], :per_page => 5)\n @seguimientos = Ventas::Seguimiento.new\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ventas_seguimientos }\n end\n end",
"def index\n @seos = Seo.all\n end",
"def show\n @seguidore = Seguidore.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @seguidore }\n end\n end",
"def show\n @ventas_seguimiento = Ventas::Seguimiento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ventas_seguimiento }\n end\n end",
"def show\n @servicio = Servicio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @servicio }\n end\n end",
"def show\n @sistema = Sistema.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @sistema }\n end\n end",
"def show\n @socio = Socio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @socio }\n end\n end",
"def index\n @ores = Ore.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ores }\n end\n end",
"def index\r\n @salles = Salle.all\r\n\r\n respond_to do |format|\r\n format.html # index.html.erb\r\n format.json { render json: @salles }\r\n end\r\n end",
"def index\n @ecole = ecole.id if ecole?\n @semestres = Semestre.ecole(@ecole).all.page(params[:page])\n @semestre = Semestre.new\n end",
"def show\n @solicitud_servicio = SolicitudServicio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @solicitud_servicio }\n end\n end",
"def show\n @socio = Socio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @socio }\n end\n end",
"def index\n @sesions = Sesion.where(entidad_paraestatal_id: @entidad_paraestatal.id).all\n @suplente = Suplente.all\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @sesions }\n end\n end",
"def show\n @estatuto = Estatuto.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estatuto }\n end\n end",
"def show\n @osoba = Osoba.find(params[:id])\n\n render json: @osoba\n end",
"def index\n @colegios = Colegio.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @colegios }\n end\n end",
"def show\n @estudiante = Estudiante.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estudiante }\n end\n end",
"def show\n @oase = Oasis.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @oase }\n end\n end",
"def show\n @colegio = Colegio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @colegio }\n end\n end",
"def index\n @seas = Sea.all\n end",
"def show\n @sugerencia = Sugerencia.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @sugerencia }\n end\n end",
"def new\n @seguro = Seguro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @seguro }\n end\n end",
"def index\n @socio_serasas = SocioSerasa.all\n end",
"def show\n @pessoa = Pessoa.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @pessoa }\n end\n end",
"def show\n @concurso = Concurso.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @concurso }\n end\n end",
"def index\n logement = Logement.find_by(id:params[:logement_id])\n equipement = logement.equi_securites[0].title\n equipements = logement.equi_securites[0]\n\n render json: {\n securites:equipement,\n fichier:equipements\n }\n end",
"def show\n @veiculo = Veiculo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @veiculo }\n end\n end",
"def show\n @estoque = Estoque.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estoque }\n end\n end",
"def show\n @pessoa = Pessoa.find(params[:id])\n\n respond_to do |format|\n # format.html # show.html.erb\n format.json { render json: @pessoa }\n end\n end",
"def show\n @asesor = Asesor.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @asesor }\n end\n end",
"def index\n @clientes = Cliente.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @clientes }\n end\n end",
"def show\n @cso = Cso.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cso }\n end\n end",
"def index\n @soatseguros = Soatseguro.all\n end",
"def show\n @cliente = Cliente.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cliente }\n end\n end",
"def show\n @cliente = Cliente.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cliente }\n end\n end",
"def show\n @cliente = Cliente.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cliente }\n end\n end",
"def consulta\n fiesta = Fiesta.all\n render json: fiesta\n end",
"def show\n @sitio = Sitio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @sitio }\n end\n end",
"def show\n @clientepedido = Clientepedido.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @clientepedido }\n end\n end",
"def show\n @coisa = Coisa.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @coisa }\n end\n end",
"def show\n @cargo_eleicao = CargoEleicao.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cargo_eleicao }\n end\n end",
"def index\n @seguimientos = Seguimiento.all\n end",
"def show\n @tecnico = Tecnico.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @tecnico }\n end\n end",
"def show\n @tecnico = Tecnico.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @tecnico }\n end\n end",
"def index\n @pedidos = Pedido.find(:all, :conditions => [\"cliente_id=?\", session[:usuario_id]])\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @pedidos }\n end\n end",
"def index\r\n# @asistencias = Asistencia.all\r\n seccion = params[:seccion_id]\r\n if params[:seccion_id].nil?\r\n fecha = Date.current\r\n else\r\n fecha = params[:fecha].to_date\r\n end\r\n \r\n @asistencias = Asistencia.por_seccion_fecha(anio_escolar.id, seccion, fecha).salida\r\n\r\n respond_to do |format|\r\n format.html # index.html.erb\r\n format.json { render json: @asistencias }\r\n end\r\n end",
"def show\n @consumo = Consumo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @consumo }\n end\n end",
"def show\n @peso = Peso.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @peso }\n end\n end",
"def show\n @sitio_entrega = SitioEntrega.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @sitio_entrega }\n end\n end",
"def show\n @caixa = Caixa.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @caixa }\n end\n end",
"def show\n @asiento = Asiento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @asiento }\n end\n end",
"def show\n @seo_datum = SeoDatum.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @seo_datum }\n end\n end",
"def show\n @cuerpo = Cuerpo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cuerpo }\n end\n end",
"def show\n @estacionamiento = Estacionamiento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estacionamiento }\n end\n end",
"def show\n @tecnico = Tecnico.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @tecnico }\n end\n end",
"def index\n @sekilas_infos = SekilasInfo.all\n end",
"def show\n @respuesta = Respuesta.find(params[:id])\n\n render json: @respuesta\n end",
"def show\n @spaethi = Spaethi.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @spaethi }\n end\n end",
"def index\n @curso = Curso.find(params[:curso_id])\n @seccions = Seccion.all\n end",
"def show\n @fulcliente = Fulcliente.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fulcliente }\n end\n end",
"def show\n @sabio = Sabio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @sabio }\n end\n end",
"def show\n @cliente = Cliente.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cliente }\n end\n end",
"def show\n @suplente = Suplente.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @suplente }\n end\n end",
"def index\n @sessaos = Sessao.all\n end",
"def index\n @sessaos = Sessao.all\n end",
"def index\n rol = Role.where(:id=>current_user.role).first\n if rol.nombre == \"DN\" or rol.nombre == \"ACRM\"\n @colegiaturas = Colegiatura.all\n else\n @colegiaturas = Colegiatura.where(:sede_id=>current_user.sede)\n end \n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @colegiaturas }\n end\n end",
"def index\n @equipos = Equipo.all\n render json: @equipos, status: :ok\n end",
"def index\n @soirees = Soiree.all\n end",
"def semesters\n uni_year = UniYear.find_by_id(params[:uni_year_id])\n @semesters = uni_year ? uni_year.semesters : []\n \n respond_to do |format|\n format.json { render json: @semesters }\n end\n end",
"def show\n @competicao = Competicao.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @competicao }\n end\n end",
"def find_client\n cliente = get_cliente(params[:id])\n\n respond_to do |format|\n format.json {render json: {client: cliente}}\n end\n end",
"def show\n @cegonha = Cegonha.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cegonha }\n end\n end",
"def new\n @selecao = Selecao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @selecao }\n end\n end",
"def index\n @cooperativas = Cooperativa.where(:status_id => Status.find_by_descricao('Ativo'))\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @cooperativas }\n end\n end",
"def new\n @soiree = Soiree.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @soiree }\n end\n end",
"def show\n @eou = Eou.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @eou }\n end\n end",
"def show\n @asociado = Asociado.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @asociado }\n end\n end",
"def show\n @causale = Causale.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @causale }\n end\n end",
"def show\n @colegiatura = Colegiatura.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @colegiatura }\n end\n end",
"def show\n @sotrudniki = Sotrudniki.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @sotrudniki }\n end\n end",
"def index\n @seguridad_usuarios = Seguridad::Usuario.order('usuario')\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @seguridad_usuarios }\n end\n end",
"def index\n @conseilles = Conseille.all\n respond_to do |format|\n format.html\n format.json { render json: @conseilles}\n end\n end",
"def showdetails\r\n @salle = Salle.find(params[:id])\r\n\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.json { render json: @salle }\r\n end\r\n end",
"def index\n # @sesiunes = Sesiune.all\n end",
"def show\n @serv_adicionale = ServAdicionale.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @serv_adicionale }\n end\n end",
"def show\n @safra_verdoso = SafraVerdoso.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @safra_verdoso }\n end\n end",
"def index\n @ginasios = Ginasio.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @ginasios }\n end\n end",
"def show\n @comentario = Comentario.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @comentario }\n end\n end",
"def show\n @comentario = Comentario.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @comentario }\n end\n end",
"def show\n @ativo_outro = AtivoOutro.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ativo_outro }\n end\n end",
"def index\n @usuarios = Usuario.por_colegio(colegio.id).order(\"nombre\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @usuarios }\n end\n end",
"def index\n @semestres = Semestre.all\n end",
"def show\r\n @asistencia = Asistencia.find(params[:id])\r\n\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.json { render json: @asistencia }\r\n end\r\n end",
"def show\n @empresa_servicio = EmpresaServicio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @empresa_servicio }\n end\n end",
"def show\n @leito = Leito.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @leito }\n end\n end"
] | [
"0.71064365",
"0.6753639",
"0.66718787",
"0.6658466",
"0.66025007",
"0.65756786",
"0.6562154",
"0.653512",
"0.65227085",
"0.6406556",
"0.6405248",
"0.63974494",
"0.6388989",
"0.63684595",
"0.6367398",
"0.6338508",
"0.63342",
"0.6314306",
"0.6309118",
"0.63060516",
"0.6303607",
"0.6299846",
"0.6294385",
"0.62935954",
"0.6255621",
"0.62308174",
"0.6220407",
"0.6214942",
"0.6209806",
"0.62087715",
"0.6206036",
"0.6205999",
"0.6187528",
"0.6185451",
"0.6185367",
"0.61744905",
"0.6168249",
"0.615892",
"0.6151983",
"0.6151983",
"0.6151983",
"0.6150073",
"0.6139414",
"0.6132605",
"0.61326003",
"0.6129763",
"0.61251175",
"0.6123024",
"0.6123024",
"0.6113337",
"0.6109456",
"0.6105795",
"0.6100176",
"0.6099597",
"0.6098664",
"0.6097925",
"0.60943353",
"0.60776454",
"0.60764265",
"0.6074864",
"0.6064093",
"0.60623395",
"0.6061988",
"0.60602033",
"0.60593325",
"0.60593224",
"0.6059078",
"0.6056323",
"0.6055279",
"0.6055279",
"0.605409",
"0.6048775",
"0.6039474",
"0.6039007",
"0.60340035",
"0.6026217",
"0.6020471",
"0.6016492",
"0.601578",
"0.6007939",
"0.6004983",
"0.60030776",
"0.59997433",
"0.59985244",
"0.59968007",
"0.59947735",
"0.59904134",
"0.5986531",
"0.59838384",
"0.5981207",
"0.59810615",
"0.597897",
"0.5972469",
"0.5972469",
"0.5963786",
"0.59606963",
"0.5958578",
"0.59582824",
"0.5956881",
"0.59529275"
] | 0.69207305 | 1 |
GET /selecaos/new GET /selecaos/new.json | def new
@selecao = Selecao.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @selecao }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @seguro = Seguro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @seguro }\n end\n end",
"def new\n @sezione = Sezione.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sezione }\n end\n end",
"def new\n @seguidore = Seguidore.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @seguidore }\n end\n end",
"def new\n @escola = Escola.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @escola }\n end\n end",
"def new\n @socio = Socio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @socio }\n end\n end",
"def new\n @sistema = Sistema.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sistema }\n end\n end",
"def new\n @estatuto = Estatuto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @estatuto }\n end\n end",
"def new\r\n @salle = Salle.new\r\n\r\n respond_to do |format|\r\n format.html # new.html.erb\r\n format.json { render json: @salle }\r\n end\r\n end",
"def new\n @socio = Socio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @socio }\n end\n end",
"def new\n @caixa = Caixa.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @caixa }\n end\n end",
"def new\n @servicio = Servicio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @servicio }\n end\n end",
"def new\n @peso = Peso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @peso }\n end\n end",
"def new\n @suplente = Suplente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @suplente }\n end\n end",
"def new\n @sesion = Sesion.where(entidad_paraestatal_id: @entidad_paraestatal.id).new\n #@sesion.suplente = Suplente.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sesion }\n end\n end",
"def new\n @coisa = Coisa.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @coisa }\n end\n end",
"def new\n @sitio_entrega = SitioEntrega.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sitio_entrega }\n end\n end",
"def new\n @soiree = Soiree.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @soiree }\n end\n end",
"def new\n @ventas_seguimiento = Ventas::Seguimiento.new params[:ventas_seguimiento]\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @ventas_seguimiento }\n end\n end",
"def new\n @sitio = Sitio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sitio }\n end\n end",
"def new\n @tecnico = Tecnico.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tecnico }\n end\n end",
"def new\n @asociado = Asociado.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @asociado }\n end\n end",
"def new\n @veiculo = Veiculo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @veiculo }\n end\n end",
"def new\n @sugerencia = Sugerencia.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @sugerencia }\n end\n end",
"def new\n puts 'NEW METHOD'\n @pessoa = Pessoa.new\n @pessoa.enderecos.build\n 2.times { @pessoa.telefones.build }\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pessoa }\n end\n end",
"def new\n @estudiante = Estudiante.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @estudiante }\n end\n end",
"def new\n @oase = Oasis.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @oase }\n end\n end",
"def create\n @seguro = Seguro.new(params[:seguro])\n\n respond_to do |format|\n if @seguro.save\n format.html { redirect_to @seguro, notice: 'Seguro was successfully created.' }\n format.json { render json: @seguro, status: :created, location: @seguro }\n else\n format.html { render action: \"new\" }\n format.json { render json: @seguro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @sotrudniki = Sotrudniki.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sotrudniki }\n end\n end",
"def new\n @concurso = Concurso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @concurso }\n end\n end",
"def new\n @colegio = Colegio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @colegio }\n end\n end",
"def create\n @selecao = Selecao.new(params[:selecao])\n\n respond_to do |format|\n if @selecao.save\n format.html { redirect_to @selecao, notice: 'Selecao was successfully created.' }\n format.json { render json: @selecao, status: :created, location: @selecao }\n else\n format.html { render action: \"new\" }\n format.json { render json: @selecao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @newspage = Newspage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @newspage }\n end\n end",
"def new\n @spiel = Spiel.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spiel }\n end\n end",
"def new\n @tecnico = Tecnico.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @tecnico }\n end\n end",
"def new\n @cso = Cso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cso }\n end\n end",
"def new\n @cliente = Cliente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cliente }\n end\n end",
"def new\n @cliente = Cliente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cliente }\n end\n end",
"def new\n @cliente = Cliente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cliente }\n end\n end",
"def new\n @cliente = Cliente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cliente }\n end\n end",
"def new\n @cliente = Cliente.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cliente }\n end\n end",
"def new\n @torso = Torso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @torso }\n end\n end",
"def new\n @stone = Stone.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @stone }\n end\n end",
"def new\n @asesor = Asesor.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @asesor }\n end\n end",
"def new\n @tipo_negocio = TipoNegocio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tipo_negocio }\n end\n end",
"def new\n @sala = Sala.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sala }\n end\n end",
"def new\n @etnia = Etnia.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @etnia }\n end\n end",
"def new\n @distro = Distro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @distro }\n end\n end",
"def new\n @distro = Distro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @distro }\n end\n end",
"def new\n @noto = Noto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @noto }\n end\n end",
"def new\n @torneo = Torneo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @torneo }\n end\n end",
"def new\n @atracao = Atracao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @atracao }\n end\n end",
"def new\n @solicitud_servicio = SolicitudServicio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @solicitud_servicio }\n end\n end",
"def new\n @spaethi = Spaethi.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spaethi }\n end\n end",
"def new\n @comisaria = Comisaria.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @comisaria }\n end\n end",
"def new\n @comentario = Comentario.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @comentario }\n end\n end",
"def new\n @po = Po.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @po }\n end\n end",
"def new\n @pologeno = Pologeno.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pologeno }\n end\n end",
"def new\n @estacionamiento = Estacionamiento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @estacionamiento }\n end\n end",
"def new\n @stable = Stable.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @stable }\n end\n end",
"def new\n @descuento = Descuento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @descuento }\n end\n end",
"def new\n @contrato = Contrato.new\n\n respond_to do |format|\n format.html { render layout: nil } # new.html.erb\n format.json { render json: @contrato }\n end\n end",
"def new\n @safra_verdoso = SafraVerdoso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @safra_verdoso }\n end\n end",
"def new\n @empresa = Empresa.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @empresa }\n end\n end",
"def new\n @cuerpo = Cuerpo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cuerpo }\n end\n end",
"def new\n @respuesta = Respuesta.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @respuesta }\n end\n end",
"def new\n @produto = Produto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @produto }\n end\n end",
"def new\n @cliente = Cliente.new\n localidad_new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cliente }\n end\n end",
"def new\n @estoque = Estoque.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @estoque }\n end\n end",
"def new\n @eou = Eou.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @eou }\n end\n end",
"def new\n @trnodo = Trnodo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @trnodo }\n end\n end",
"def new\n @seo_datum = SeoDatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @seo_datum }\n end\n end",
"def new\n @pedido = Pedido.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pedido }\n end\n end",
"def new\n unless possui_acesso?()\n return\n end\n @aviso = Aviso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @aviso }\n end\n end",
"def new\n @evento = Evento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @evento }\n end\n end",
"def new\n @recurso = Recurso.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @recurso }\n end\n end",
"def new\n @tea = Tea.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tea }\n end\n end",
"def new\n @tea = Tea.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tea }\n end\n end",
"def new\n @viaje = Viaje.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @viaje }\n end\n end",
"def new\n @competicao = Competicao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @competicao }\n end\n end",
"def new\n @modelo = Modelo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @modelo }\n end\n end",
"def new\n @sabio = Sabio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sabio }\n end\n end",
"def new\n @testis = Teste.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @testis }\n end\n end",
"def new\n @clue = Clue.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @clue }\n end\n end",
"def new\n @clue = Clue.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @clue }\n end\n end",
"def new\n seleccionarMenu(:juzgados)\n @juzgado = Juzgado.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @juzgado }\n end\n end",
"def new\n @detalle = Detalle.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @detalle }\n end\n end",
"def new\n @lore = Lore.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @lore }\n end\n end",
"def new\n @clientepedido = Clientepedido.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @clientepedido }\n end\n end",
"def new\n @tipo_usuario = TipoUsuario.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tipo_usuario }\n end\n end",
"def new\n @carrera = Carrera.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @carrera }\n end\n end",
"def new\n @producto = Producto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @producto }\n end\n end",
"def new\n @tipo_convenio = TipoConvenio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tipo_convenio }\n end\n end",
"def new\n @orgao = Orgao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @orgao }\n end\n end",
"def new\n @lieu = Lieu.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @lieu }\n end\n end",
"def new\n @ano = Ano.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @ano }\n end\n end",
"def new\n @plannegocio = Plannegocio.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @plannegocio }\n end\n end",
"def new\n @publicidade = Publicidade.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @publicidade }\n end\n end",
"def new\n @spieler = Spieler.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spieler }\n end\n end",
"def new\n @prioridade = Prioridade.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @prioridade }\n end\n end",
"def new\n @produccion = Produccion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @produccion }\n end\n end"
] | [
"0.7673355",
"0.7500561",
"0.74599725",
"0.73434454",
"0.73012733",
"0.7284069",
"0.72820944",
"0.72461057",
"0.7233107",
"0.7222035",
"0.71592706",
"0.7148576",
"0.7142686",
"0.7138414",
"0.7121858",
"0.7118427",
"0.71063447",
"0.70926577",
"0.7088671",
"0.7081683",
"0.70816624",
"0.70723224",
"0.7071649",
"0.7059552",
"0.7048988",
"0.7047617",
"0.7041746",
"0.70384604",
"0.7034667",
"0.70334727",
"0.7029464",
"0.7029082",
"0.7027361",
"0.7026411",
"0.7025253",
"0.7023023",
"0.7023023",
"0.7023023",
"0.7023023",
"0.7023023",
"0.7017815",
"0.70141315",
"0.70073926",
"0.69952047",
"0.699171",
"0.69853675",
"0.6970273",
"0.6970273",
"0.69613606",
"0.6948337",
"0.69466937",
"0.6943344",
"0.6937624",
"0.6935317",
"0.6930871",
"0.6927055",
"0.69240147",
"0.692206",
"0.69203633",
"0.6917578",
"0.69133765",
"0.69117445",
"0.69015324",
"0.6898056",
"0.6897367",
"0.6893347",
"0.6890673",
"0.68888265",
"0.68835723",
"0.68779844",
"0.6877747",
"0.6873232",
"0.68715274",
"0.6869743",
"0.6869147",
"0.6868314",
"0.6868314",
"0.6867516",
"0.6862879",
"0.68627644",
"0.6857178",
"0.68569845",
"0.6856636",
"0.6856636",
"0.68549925",
"0.68505555",
"0.68405026",
"0.68362594",
"0.68357366",
"0.68313473",
"0.68228495",
"0.68212235",
"0.6818889",
"0.68171066",
"0.6816032",
"0.6814835",
"0.68131095",
"0.6813032",
"0.68077296",
"0.68030894"
] | 0.7744536 | 0 |
POST /selecaos POST /selecaos.json | def create
@selecao = Selecao.new(params[:selecao])
respond_to do |format|
if @selecao.save
format.html { redirect_to @selecao, notice: 'Selecao was successfully created.' }
format.json { render json: @selecao, status: :created, location: @selecao }
else
format.html { render action: "new" }
format.json { render json: @selecao.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @seguro = Seguro.new(params[:seguro])\n\n respond_to do |format|\n if @seguro.save\n format.html { redirect_to @seguro, notice: 'Seguro was successfully created.' }\n format.json { render json: @seguro, status: :created, location: @seguro }\n else\n format.html { render action: \"new\" }\n format.json { render json: @seguro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @soatseguro = Soatseguro.new(soatseguro_params)\n\n respond_to do |format|\n if @soatseguro.save\n format.html { redirect_to @soatseguro, notice: 'Soatseguro was successfully created.' }\n format.json { render :show, status: :created, location: @soatseguro }\n else\n format.html { render :new }\n format.json { render json: @soatseguro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @escola = Escola.new(params[:escola])\n\n respond_to do |format|\n if @escola.save\n format.html { redirect_to @escola, :notice => 'Escola was successfully created.' }\n format.json { render :json => @escola, :status => :created, :location => @escola }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @escola.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @soiree = Soiree.new(params[:soiree])\n\n respond_to do |format|\n if @soiree.save\n format.html { redirect_to @soiree, notice: 'Soiree was successfully created.' }\n format.json { render json: @soiree, status: :created, location: @soiree }\n else\n format.html { render action: \"new\" }\n format.json { render json: @soiree.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @socio = Socio.new(params[:socio])\n\n respond_to do |format|\n if @socio.save\n format.html { redirect_to @socio, :notice => 'Socio cadastrado com sucesso.' }\n format.json { render :json => @socio, :status => :created, :location => @socio }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @socio.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @estatuto = Estatuto.new(params[:estatuto])\n\n respond_to do |format|\n if @estatuto.save\n format.html { redirect_to @estatuto, notice: 'Estatuto was successfully created.' }\n format.json { render json: @estatuto, status: :created, location: @estatuto }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estatuto.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sesiune = Sesiune.new(sesiune_params)\n\n respond_to do |format|\n if @sesiune.save\n\n # duplic temele si domeniile din ultima sesiune si le adaug in baza de date cu sesiune_id asta pe care tocmai am creat-o\n @ultima_sesiune = Sesiune.where(este_deschisa: false).last\n Domeniu.where(sesiune_id: @ultima_sesiune.id).each do |dom|\n nou_dom = Domeniu.create(nume: dom.nume, descriere: dom.descriere, user_id: dom.user_id, sesiune_id: @sesiune.id)\n Tema.where(sesiune_id: @ultima_sesiune.id).where(domeniu_id: dom.id).each do |tema|\n Tema.create(nume: tema.nume, descriere: tema.descriere, domeniu_id: nou_dom.id, este_libera: true, user_id: tema.user_id, sesiune_id: @sesiune.id)\n # ce faci dc user_id-ul temei este un student care a terminat? si th i se desfiinteaza contul?\n end\n end\n\n format.html { redirect_to controlPanel_path, notice: 'Sesiune was successfully created.' }\n format.json { render action: 'show', status: :created, location: @sesiune }\n else\n format.html { render action: 'new' }\n format.json { render json: controlPanel_path.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @osoba = Osoba.new(params[:osoba])\n\n if @osoba.save\n render json: @osoba, status: :created, location: @osoba\n else\n render json: @osoba.errors, status: :unprocessable_entity\n end\n end",
"def create\n @socio_serasa = SocioSerasa.new(socio_serasa_params)\n\n respond_to do |format|\n if @socio_serasa.save\n format.html { redirect_to @socio_serasa, notice: 'Socio serasa was successfully created.' }\n format.json { render action: 'show', status: :created, location: @socio_serasa }\n else\n format.html { render action: 'new' }\n format.json { render json: @socio_serasa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @soiree = Soiree.new(soiree_params)\n\n respond_to do |format|\n if @soiree.save\n format.html { redirect_to @soiree, notice: 'Votre évènement a bien été créé.' }\n format.json { render :show, status: :created, location: @soiree }\n else\n format.html { render :new }\n format.json { render json: @soiree.errors, status: :unprocessable_entity }\n end\n end\n end",
"def criar_sobrevivente\n @suvivor = Sobrevivente.create(\n name: params[:name], genero: params[:genero], idade: params[:idade],\n lat: params[:lat], lon: params[:lon],\n agua: params[:agua], comida: params[:comida], medicamento: params[:medicamento],\n municao: params[:municao]\n )\n render json: @suvivor\n end",
"def create\n @seo = Seo.new(seo_params)\n\n respond_to do |format|\n if @seo.save\n format.html { redirect_to @seo, notice: 'Seo was successfully created.' }\n format.json { render :show, status: :created, location: @seo }\n else\n format.html { render :new }\n format.json { render json: @seo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sessao = Sessao.new(sessao_params)\n psicologos_for_select\n pacientes_for_select\n respond_to do |format|\n @sessao.user = current_user\n if @sessao.save\n format.html { redirect_to @sessao, notice: \"Sessao was successfully created.\" }\n format.json { render :show, status: :created, location: @sessao }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @sessao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sessao = Sessao.new(sessao_params)\n\n respond_to do |format|\n if @sessao.save\n format.html { redirect_to @sessao, notice: 'Sessao was successfully created.' }\n format.json { render :show, status: :created, location: @sessao }\n else\n format.html { render :new }\n format.json { render json: @sessao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sintoma = Sintoma.new(sintoma_params)\n\n respond_to do |format|\n if @sintoma.save\n format.html { redirect_to @sintoma, notice: 'Sintoma was successfully created.' }\n format.json { render :show, status: :created, location: @sintoma }\n else\n format.html { render :new }\n format.json { render json: @sintoma.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @solicitante = Solicitante.new(solicitante_params)\n\n respond_to do |format|\n if @solicitante.save\n format.html { redirect_to @solicitante, notice: 'Solicitante was successfully created.' }\n format.json { render :show, status: :created, location: @solicitante }\n else\n format.html { render :new }\n format.json { render json: @solicitante.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sinistro = Sinistro.new(sinistro_params)\n\n respond_to do |format|\n if @sinistro.save\n format.html { redirect_to @sinistro, notice: 'Sinistro was successfully created.' }\n format.json { render :show, status: :created, location: @sinistro }\n else\n format.html { render :new }\n format.json { render json: @sinistro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sejour = current_user.sejours.build(sejour_params)\n\n respond_to do |format|\n if @sejour.save\n format.html { redirect_to @sejour, notice: 'Le sejour a bien ete cree.' }\n format.json { render :show, status: :created, location: @sejour }\n else\n format.html { render :new }\n format.json { render json: @sejour.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n puts 'AQQQQQUUUUUUUIIIIII'\n json = ActiveSupport::JSON.decode(params[:pessoa])\n puts json\n @pessoa = Pessoa.new(json)\n # @address = Address.new(params[:address])\n\n # @client.addresses = @address\n\n respond_to do |format|\n if @pessoa.save\n format.html { redirect_to @pessoa, notice: 'Pessoa was successfully created.' }\n format.json { render json: @pessoa, status: :created, location: @pessoa }\n else\n format.html { render action: \"new\" }\n format.json { render json: @pessoa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sugerencia = Sugerencia.new(params[:sugerencia])\n\n respond_to do |format|\n if @sugerencia.save\n format.html { redirect_to @sugerencia, :notice => 'Sugerencia was successfully created.' }\n format.json { render :json => @sugerencia, :status => :created, :location => @sugerencia }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @sugerencia.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @estoque = Estoque.new(params[:estoque])\n\n respond_to do |format|\n if @estoque.save\n format.html { redirect_to @estoque, notice: 'Estoque was successfully created.' }\n format.json { render json: @estoque, status: :created, location: @estoque }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estoque.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\r\n @salle = Salle.new(params[:salle])\r\n\r\n respond_to do |format|\r\n if @salle.save\r\n format.html { redirect_to @salle, notice: 'Salle was successfully created.' }\r\n format.json { render json: @salle, status: :created, location: @salle }\r\n else\r\n format.html { render action: \"new\" }\r\n format.json { render json: @salle.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n @seguidore = Seguidore.new(params[:seguidore])\n\n respond_to do |format|\n if @seguidore.save\n format.html { redirect_to @seguidore, notice: 'Seguidore was successfully created.' }\n format.json { render json: @seguidore, status: :created, location: @seguidore }\n else\n format.html { render action: \"new\" }\n format.json { render json: @seguidore.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ventas_seguimiento = Ventas::Seguimiento.new(params[:ventas_seguimiento])\n @ventas_seguimiento.usuario = current_user.name\n @ventas_seguimiento.user = current_user\n\n respond_to do |format|\n if @ventas_seguimiento.save\n format.html { redirect_to @ventas_seguimiento, notice: 'Seguimiento was successfully created.' }\n format.json { render json: @ventas_seguimiento, status: :created, location: @ventas_seguimiento }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ventas_seguimiento.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sezione = Sezione.new(params[:sezione])\n\n respond_to do |format|\n if @sezione.save\n format.html { redirect_to sezioni_path, notice: 'Sezione was successfully created.' }\n format.json { render json: @sezione, status: :created, location: @sezione }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sezione.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @segundo = Segundo.new(segundo_params)\n\n respond_to do |format|\n if @segundo.save\n format.html { redirect_to @segundo, notice: 'Segundo was successfully created.' }\n format.json { render :show, status: :created, location: @segundo }\n else\n format.html { render :new }\n format.json { render json: @segundo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @asesor = Asesor.new(params[:asesor])\n\n respond_to do |format|\n if @asesor.save\n format.html { redirect_to @asesor, notice: 'Asesor was successfully created.' }\n format.json { render json: @asesor, status: :created, location: @asesor }\n else\n format.html { render action: \"new\" }\n format.json { render json: @asesor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @solicitud = Solicitud.new(solicitud_params)\n\n respond_to do |format|\n if @solicitud.save\n format.html { redirect_to @solicitud, notice: \"Solicitud was successfully created.\" }\n format.json { render :show, status: :created, location: @solicitud }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @solicitud.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @solicitador = Solicitador.new(solicitador_params)\n\n respond_to do |format|\n if @solicitador.save\n format.html { redirect_to @solicitador, notice: 'Solicitador was successfully created.' }\n format.json { render :show, status: :created, location: @solicitador }\n else\n format.html { render :new }\n format.json { render json: @solicitador.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @secco = @curso.seccos.build(secco_params)\n\n respond_to do |format|\n if @secco.save\n format.html { redirect_to curso_secco_path(@curso, @secco), notice: 'Seccao was successfully created.' }\n format.json { render :show, status: :created, location: @secco }\n else\n format.html { render :new }\n format.json { render json: @secco.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @seguimiento = Seguimiento.new(seguimiento_params)\n\n respond_to do |format|\n if @seguimiento.save\n format.html { redirect_to @seguimiento.caso, notice: \"Seguimiento creado.\" }\n format.json { render :show, status: :created, location: @seguimiento }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @seguimiento.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sucursale = Sucursale.new(sucursale_params)\n @sucursale.usuarios_id = current_usuario.id\n respond_to do |format|\n if @sucursale.save\n format.html { redirect_to @sucursale, notice: 'Sucursal creada con exito!' }\n format.json { render :show, status: :created, location: @sucursale }\n else\n format.html { render :new }\n format.json { render json: @sucursale.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @solicitud = Solicitud.new(solicitud_params)\n\n respond_to do |format|\n if @solicitud.save\n format.html { redirect_to @solicitud, notice: 'Solicitud was successfully created.' }\n format.json { render action: 'show', status: :created, location: @solicitud }\n else\n format.html { render action: 'new' }\n format.json { render json: @solicitud.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @establecimiento = Establecimiento.new(establecimiento_params)\n\n respond_to do |format|\n if @establecimiento.save\n format.html { redirect_to @establecimiento, notice: 'Establecimiento was successfully created.' }\n format.json { render :show, status: :created, location: @establecimiento }\n else\n format.html { render :new }\n format.json { render json: @establecimiento.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @solicitacao = Solicitacao.new(solicitacao_params)\n\n respond_to do |format|\n if @solicitacao.save\n format.html { redirect_to @solicitacao, notice: 'Solicitacao was successfully created.' }\n format.json { render :show, status: :created, location: @solicitacao }\n else\n format.html { render :new }\n format.json { render json: @solicitacao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n pessoa = Pessoa.new(pessoa_params) \n \n if pessoa.save\n render json: {status: 'SUCCESSO', message:'Usuário cadastrado com sucesso!', data:pessoa},status: :ok\n else\n render json: {status: 'ERRO', message:'Usuário não pode ser cadastrado. Tente novamente mais tarde.', data:pessoa.errors},status: :unprocessable_entity\n end\n end",
"def create\n @uni_aso = UniAso.new(uni_aso_params)\n\n respond_to do |format|\n if @uni_aso.save\n format.html { redirect_to @uni_aso, notice: 'Uni aso was successfully created.' }\n format.json { render :show, status: :created, location: @uni_aso }\n else\n format.html { render :new }\n format.json { render json: @uni_aso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n client = Cliente.new\n\n client.nombre = params[:nombre]\n client.cedula = params[:cedula]\n client.pagina = params[:pagina]\n\n client.dirrecion = params[:dirrecion]\n client.telefono = params[:telefono]\n \n client.sector = params[:sector]\n \n\n if client.save\n \n\n render(json: client,status: 201 ,location: client)\n else\n\n render(json: client.errors,status: 422 )\n\n end\n end",
"def create\n @respuesta = Respuesta.new(params[:respuesta])\n\n if @respuesta.save\n render json: @respuesta, status: :created, location: @respuesta\n else\n render json: @respuesta.errors, status: :unprocessable_entity\n end\n end",
"def create\n\n @salle = current_user.salles.new(salle_params)\n\n respond_to do |format|\n if @salle.save\n format.html { redirect_to salles_url, notice: 'Salle was successfully created.' }\n format.json { render :show, status: :created, location: @salle }\n else\n format.html { render :new }\n format.json { render json: @salle.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @asiento = Asiento.new(params[:asiento])\n\n respond_to do |format|\n if @asiento.save\n format.html { redirect_to @asiento, :notice => 'El apunte fue creado.' }\n format.json { render :json => @asiento, :status => :created, :location => @asiento }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @asiento.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @sexe = Sexe.new(sexe_params)\n\n respond_to do |format|\n if @sexe.save\n format.html { redirect_to @sexe, notice: 'Sexe was successfully created.' }\n format.json { render :show, status: :created, location: @sex }\n else\n format.html { render :new }\n format.json { render json: @sexe.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @stone = Stone.new(stone_params)\n\n \n if @stone.save\n respond_with @stone\n else\n render json: @stone.errors, status: :unprocessable_entity \n end\n \n \n end",
"def create\n @semestre = Semestre.new(semestre_params)\n\n respond_to do |format|\n if @semestre.save\n format.html { redirect_to @semestre, notice: 'Semestre was successfully created.' }\n format.json { render :show, status: :created, location: @semestre }\n else\n format.html { render :new }\n format.json { render json: @semestre.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n @semestre = current_user.semestres.new(semestre_params)\n\n respond_to do |format|\n if @semestre.save\n format.html { redirect_to semestres_url, notice: 'Semestre was successfully created.' }\n format.json { render :show, status: :created, location: @semestre }\n else\n format.html { render :new }\n format.json { render json: @semestre.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sugestao = Sugestao.new(sugestao_params)\n\n respond_to do |format|\n if @sugestao.save\n format.html { redirect_to @sugestao, notice: 'Sugestão salva com sucesso.' }\n format.json { render :show, status: :created, location: @sugestao }\n else\n format.html { render :new }\n format.json { render json: @sugestao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sekilas_info = SekilasInfo.new(sekilas_info_params)\n\n respond_to do |format|\n if @sekilas_info.save\n format.html { redirect_to @sekilas_info, notice: 'Sekilas info was successfully created.' }\n format.json { render action: 'show', status: :created, location: @sekilas_info }\n else\n format.html { render action: 'new' }\n format.json { render json: @sekilas_info.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @safra_averiado = SafraAveriado.new(params[:safra_averiado])\n\n respond_to do |format|\n if @safra_averiado.save\n format.html { redirect_to @safra_averiado, notice: 'Safra averiado was successfully created.' }\n format.json { render json: @safra_averiado, status: :created, location: @safra_averiado }\n else\n format.html { render action: \"new\" }\n format.json { render json: @safra_averiado.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @seihinn = Seihinn.new(seihinn_params)\n\n respond_to do |format|\n if @seihinn.save\n format.html { redirect_to @seihinn, notice: \"Seihinn was successfully created.\" }\n format.json { render :show, status: :created, location: @seihinn }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @seihinn.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @eou = Eou.new(params[:eou])\n\n respond_to do |format|\n if @eou.save\n format.html { redirect_to @eou, :notice => 'Eou was successfully created.' }\n format.json { render :json => @eou, :status => :created, :location => @eou }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @eou.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @cargo_eleicao = CargoEleicao.new(params[:cargo_eleicao])\n\n respond_to do |format|\n if @cargo_eleicao.save\n format.html { redirect_to @cargo_eleicao, notice: 'Cargo eleicao was successfully created.' }\n format.json { render json: @cargo_eleicao, status: :created, location: @cargo_eleicao }\n else\n format.html { render action: \"new\" }\n format.json { render json: @cargo_eleicao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sesion = Sesion.where(entidad_paraestatal_id: @entidad_paraestatal.id).new(params[:sesion])\n\n respond_to do |format|\n if @sesion.save\n format.html { redirect_to [@entidad_paraestatal,@sesion], notice: 'Sesion was successfully created.' }\n format.json { render json: @sesion, status: :created, location: [@entidad_paraestatal,@sesion] }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sesion.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @socio = Socio.new(socio_params)\n\n respond_to do |format|\n if @socio.save\n format.html { redirect_to @socio, notice: 'Socio criado com sucesso.' }\n format.json { render :show, status: :created, location: @socio }\n else\n format.html { render :new }\n format.json { render json: @socio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @souvenior = Souvenior.new(souvenior_params)\n\n respond_to do |format|\n if @souvenior.save\n format.html { redirect_to root_path, notice: 'Souvenior was successfully created.' }\n format.json { render :show, status: :created, location: @souvenior }\n else\n format.html { render :new }\n format.json { render json: @souvenior.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @serie_detalle = SerieDetalle.new(serie_detalle_params)\n\n respond_to do |format|\n if @serie_detalle.save\n format.html { redirect_to @serie_detalle, notice: 'Serie detalle was successfully created.' }\n format.json { render :show, status: :created, location: @serie_detalle }\n else\n format.html { render :new }\n format.json { render json: @serie_detalle.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @nebulosa = Nebulosa.new(nebulosa_params)\n\n respond_to do |format|\n if @nebulosa.save\n format.html { redirect_to @nebulosa, notice: 'Nebulosa was successfully created.' }\n format.json { render :show, status: :created, location: @nebulosa }\n else\n format.html { render :new }\n format.json { render json: @nebulosa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @inventario_cosa = InventarioCosa.new(inventario_cosa_params)\n\n respond_to do |format|\n if @inventario_cosa.save\n format.html { redirect_to @inventario_cosa, notice: 'Inventario cosa was successfully created.' }\n format.json { render :show, status: :created, location: @inventario_cosa }\n else\n format.html { render :new }\n format.json { render json: @inventario_cosa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @responsavel = Responsavel.new(responsavel_params)\n\n if @responsavel.save\n render json: @responsavel, status: :created, location: @responsavel\n else\n render json: @responsavel.errors, status: :unprocessable_entity\n end\n end",
"def create\n @exura = Exura.new(params[:exura])\n\n respond_to do |format|\n if @exura.save\n format.html { redirect_to @exura, :notice => 'Exura was successfully created.' }\n format.json { render :json => @exura, :status => :created, :location => @exura }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @exura.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @simulado = Simulado.new(simulado_params)\n\n respond_to do |format|\n if @simulado.save\n format.html { redirect_to @simulado, notice: 'Simulado was successfully created.' }\n format.json { render :show, status: :created, location: @simulado }\n else\n format.html { render :new }\n format.json { render json: @simulado.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sabio = Sabio.new(params[:sabio])\n\n respond_to do |format|\n if @sabio.save\n format.html { redirect_to @sabio, notice: 'El Sabio a sido creado exitosamente.' }\n format.json { render json: @sabio, status: :created, location: @sabio }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sabio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @silla = Silla.new(silla_params)\n\n respond_to do |format|\n if @silla.save\n format.html { redirect_to @silla, notice: 'Silla was successfully created.' }\n format.json { render :show, status: :created, location: @silla }\n else\n format.html { render :new }\n format.json { render json: @silla.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @solicitud_servicio = SolicitudServicio.new(params[:solicitud_servicio])\n\n respond_to do |format|\n if @solicitud_servicio.save\n format.html { redirect_to @solicitud_servicio, notice: 'Solicitud servicio was successfully created.' }\n format.json { render json: @solicitud_servicio, status: :created, location: @solicitud_servicio }\n else\n format.html { render action: \"new\" }\n format.json { render json: @solicitud_servicio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @spaethi = Spaethi.new(params[:spaethi])\n\n respond_to do |format|\n if @spaethi.save\n format.html { redirect_to @spaethi, notice: 'Spaethi was successfully created.' }\n format.json { render json: @spaethi, status: :created, location: @spaethi }\n else\n format.html { render action: \"new\" }\n format.json { render json: @spaethi.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @esjiaoben = Esjiaoben.new(esjiaoben_params)\n\n respond_to do |format|\n if @esjiaoben.save\n format.html { redirect_to @esjiaoben, notice: 'Esjiaoben was successfully created.' }\n format.json { render :show, status: :created, location: @esjiaoben }\n else\n format.html { render :new }\n format.json { render json: @esjiaoben.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @saida = Saida.new(saida_params)\n\n respond_to do |format|\n if @saida.save\n format.html { redirect_to @saida, notice: 'Saida was successfully created.' }\n format.json { render :show, status: :created, location: @saida }\n else\n format.html { render :new }\n format.json { render json: @saida.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @espetaculo = Espetaculo.new(espetaculo_params)\n\n respond_to do |format|\n if @espetaculo.save\n format.html { redirect_to @espetaculo, notice: 'Espetaculo was successfully created.' }\n format.json { render :show, status: :created, location: @espetaculo }\n else\n format.html { render :new }\n format.json { render json: @espetaculo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ejemplo = Ejemplo.new(ejemplo_params)\n\n respond_to do |format|\n if @ejemplo.save\n format.html { redirect_to @ejemplo, notice: 'Ejemplo was successfully created.' }\n format.json { render :show, status: :created, location: @ejemplo }\n else\n format.html { render :new }\n format.json { render json: @ejemplo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ess = Esse.new(ess_params)\n\n respond_to do |format|\n if @ess.save\n format.html { redirect_to esses_url, notice: 'Esse was successfully created.' }\n format.json { render :show, status: :created, location: @ess }\n else\n format.html { render :new }\n format.json { render json: @ess.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sotrudniki = Sotrudniki.new(params[:sotrudniki])\n\n respond_to do |format|\n if @sotrudniki.save\n format.html { redirect_to @sotrudniki, notice: 'Sotrudniki was successfully created.' }\n format.json { render json: @sotrudniki, status: :created, location: @sotrudniki }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sotrudniki.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @processo_seletivo = ProcessoSeletivo.new(processo_seletivo_params)\n\n respond_to do |format|\n if @processo_seletivo.save\n format.html { redirect_to @processo_seletivo, notice: 'Processo seletivo was successfully created.' }\n format.json { render :show, status: :created, location: @processo_seletivo }\n else\n format.html { render :new }\n format.json { render json: @processo_seletivo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @seguro = Seguro.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @seguro }\n end\n end",
"def create\n @souscripteur = Souscripteur.new(souscripteur_params)\n\n respond_to do |format|\n if @souscripteur.save\n format.html { redirect_to @souscripteur, notice: 'Souscripteur was successfully created.' }\n format.json { render :show, status: :created, location: @souscripteur }\n else\n format.html { render :new }\n format.json { render json: @souscripteur.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @usuario_seguidor = UsuarioSeguidor.new(usuario_seguidor_params)\n\n respond_to do |format|\n if @usuario_seguidor.save\n format.html { redirect_to @usuario_seguidor, notice: 'Usuario seguidor was successfully created.' }\n format.json { render :show, status: :created, location: @usuario_seguidor }\n else\n format.html { render :new }\n format.json { render json: @usuario_seguidor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\r\n @sivic_pessoa = SivicPessoa.new(sivic_pessoa_params)\r\n\r\n respond_to do |format|\r\n if @sivic_pessoa.save\r\n format.html { redirect_to @sivic_pessoa, notice: 'Registro inserido com sucesso.' }\r\n format.json { render action: 'show', status: :created, location: @sivic_pessoa }\r\n else\r\n format.html { render action: 'new' }\r\n format.json { render json: @sivic_pessoa.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n client= Client.new\n client.cedula= params[:cedula]\n client.sector= params[:sector]\n client.nombre= params[:nombre]\n client.telefono= params[:telefono]\n client.pagina= params[:pagina]\n client.direccion= params[:direccion]\n if client.save\n render(json: client, status: 201 , location: client)\n else \n render(json: client.errors, status: 422)\n end\n end",
"def create\n @estrada = Estrada.new(estrada_params)\n\n respond_to do |format|\n if @estrada.save\n format.html { redirect_to estradas_url, notice: 'Estrada criada com sucesso.' }\n format.json { render :show, status: :created, location: @estrada }\n else\n format.html { render :new }\n format.json { render json: @estrada.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n puts request.body.string\n\n if request.body.string.include? %q[\"id\"]\n render json: %q[{\"error\": \"No se puede crear usuario con id\"}], status: 400\n else\n @usuario = Usuario.new(usuario_params)\n #Tuve que hacerlo asi, pq por alguna razon no me dejaba de la forma tradicional!\n #@usuario = Usuario.new\n #@usuario.usuario = params[:usuario]\n #@usuario.nombre = params[:nombre]\n #@usuario.apellido = params[:apellido]\n #@usuario.twitter = params[:twitter]\n\n\n respond_to do |format|\n if @usuario.save\n #format.html { redirect_to @usuario, notice: 'Usuario was successfully created.' }\n format.json { render :show, status: :created, location: @usuario }\n else\n #format.html { render :new }\n format.json { render json: @usuario.errors, status: 404}# status: :unprocessable_entity }\n end\n end\n end\n end",
"def create\n @saida_produto = SaidaProduto.new(saida_produto_params)\n\n respond_to do |format|\n if @saida_produto.save\n format.html { redirect_to @saida_produto, notice: 'Saida produto was successfully created.' }\n format.json { render :show, status: :created, location: @saida_produto }\n else\n format.html { render :new }\n format.json { render json: @saida_produto.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sistema = Sistema.new(params[:sistema])\n\n respond_to do |format|\n if @sistema.save\n format.html { redirect_to @sistema, notice: 'Sistema was successfully created.' }\n format.json { render json: @sistema, status: :created, location: @sistema }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sistema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @servicio = Servicio.new(params[:servicio])\n\n respond_to do |format|\n if @servicio.save\n format.html { redirect_to @servicio, :notice => 'Servicio was successfully created.' }\n format.json { render :json => @servicio, :status => :created, :location => @servicio }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @servicio.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n \n\n respond_to do |format|\n if @evolucion.save\n format.html { redirect_to @evolucion, notice: 'Evolucion was successfully created.' }\n format.json { render :show, status: :created, location: @evolucion }\n else\n format.html { render :new }\n format.json { render json: @evolucion.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @etnia = Etnia.new(params[:etnia])\n\n respond_to do |format|\n if @etnia.save\n format.html { redirect_to @etnia, notice: 'Etnia was successfully created.' }\n format.json { render json: @etnia, status: :created, location: @etnia }\n else\n format.html { render action: \"new\" }\n format.json { render json: @etnia.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n respond_to do |format|\n if @especialidad.save\n format.html { redirect_to @especialidad, notice: 'Servicio creado exitosamente.' }\n format.json { render :show, status: :created, location: @especialidad }\n else\n format.html { render :new }\n format.json { render json: @especialidad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @producto_seccion = ProductoSeccion.new(producto_seccion_params)\n\n respond_to do |format|\n if @producto_seccion.save\n format.html { redirect_to @producto_seccion, notice: 'Producto seccion was successfully created.' }\n format.json { render :show, status: :created, location: @producto_seccion }\n else\n format.html { render :new }\n format.json { render json: @producto_seccion.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @socio = Socio.new(params[:socio])\n\n respond_to do |format|\n @socio.save\n format.html { redirect_to socios_url}\n end\n end",
"def create\n @socio = Socio.new(socio_params)\n respond_to do |format|\n if @socio.save\n format.html { redirect_to new_socio_path, notice: 'El socio fue creado correctamente.' }\n format.json { render action: 'show', status: :created, location: @socio }\n else\n format.html { render action: 'new' }\n format.json { render json: @socio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def creacion\n fiesta = Fiesta.new (params[:id])\n if Fiesta.save\n puts \"su fiesta a sido registrada\"\n else \n puts \"su fiesta no a sido registrada\"\n end\n render = json: fiesta \n end",
"def create\n @anteproyecto_estudiante = AnteproyectoEstudiante.new(anteproyecto_estudiante_params)\n\n respond_to do |format|\n if @anteproyecto_estudiante.save\n format.html { redirect_to @anteproyecto_estudiante, notice: 'Anteproyecto estudiante was successfully created.' }\n format.json { render :show, status: :created, location: @anteproyecto_estudiante }\n else\n format.html { render :new }\n format.json { render json: @anteproyecto_estudiante.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\r\n @sivic_alunoaula = SivicAlunoaula.new(sivic_alunoaula_params)\r\n\r\n respond_to do |format|\r\n if @sivic_alunoaula.save\r\n format.html { redirect_to @sivic_alunoaula, notice: 'Sivic alunoaula was successfully created.' }\r\n format.json { render action: 'show', status: :created, location: @sivic_alunoaula }\r\n else\r\n format.html { render action: 'new' }\r\n format.json { render json: @sivic_alunoaula.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n @estacionamiento = Estacionamiento.new(params[:estacionamiento])\n\n respond_to do |format|\n if @estacionamiento.save\n format.html { redirect_to @estacionamiento, notice: 'Estacionamiento was successfully created.' }\n format.json { render json: @estacionamiento, status: :created, location: @estacionamiento }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estacionamiento.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @cliente = Cliente.new\n if params[:type] == \"PessoaFisica\"\n @cliente.pessoa = PessoaFisica.new\n else\n @cliente.pessoa = PessoaJuridica.new\n end\n @cliente.assign_attributes(cliente_params)\n respond_to do |format|\n if @cliente.save\n format.html { redirect_to action: \"index\"}\n format.json { render json: @cliente.to_json(include: [:pessoa]) }\n else\n format.html { render :new }\n format.json { render json: @cliente.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sindicato = Sindicato.new(sindicato_params)\n\n respond_to do |format|\n if @sindicato.save\n format.html { redirect_to @sindicato, notice: 'Sindicato fue creado exitosamente.' }\n format.json { render :show, status: :created, location: @sindicato }\n else\n format.html { render :new }\n format.json { render json: @sindicato.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\r\n @sivic_celula = SivicCelula.new(sivic_celula_params)\r\n\r\n respond_to do |format|\r\n if @sivic_celula.save\r\n format.html { redirect_to @sivic_celula, notice: 'Registro inserido com sucesso.' }\r\n format.json { render action: 'show', status: :created, location: @sivic_celula }\r\n else\r\n format.html { render action: 'new' }\r\n format.json { render json: @sivic_celula.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n @suceso_perro = SucesoPerro.new(suceso_perro_params)\n\n respond_to do |format|\n if @suceso_perro.save\n format.html { redirect_to @suceso_perro, notice: 'Suceso perro was successfully created.' }\n format.json { render :show, status: :created, location: @suceso_perro }\n else\n format.html { render :new }\n format.json { render json: @suceso_perro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\r\n\r\n respond_to do |format|\r\n if @sepi_programa.save\r\n format.html { redirect_to @sepi_programa, notice: 'Se añadió un programa de SEPI correctamente.' }\r\n format.json { render :show, status: :created, location: @sepi_programa }\r\n else\r\n format.html { render :new }\r\n format.json { render json: @sepi_programa.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n seleccionarMenu(:juzgados)\n @juzgado = Juzgado.new(params[:juzgado])\n\n respond_to do |format|\n if @juzgado.save\n format.html { redirect_to @juzgado, notice: 'Juzgado fue creado satisfactoriamente.' }\n format.json { render json: @juzgado, status: :created, location: @juzgado }\n else\n format.html { render action: \"new\" }\n format.json { render json: @juzgado.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @asiento_de_servicio = AsientoDeServicio.new(asiento_de_servicio_params)\n\n respond_to do |format|\n if @asiento_de_servicio.save\n format.html { redirect_to @asiento_de_servicio, notice: 'Asiento de servicio was successfully created.' }\n format.json { render :show, status: :created, location: @asiento_de_servicio }\n else\n format.html { render :new }\n format.json { render json: @asiento_de_servicio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @seccion = Seccion.new(params[:seccion])\n\n respond_to do |format|\n if @seccion.save\n format.html { redirect_to(@seccion, :notice => 'Seccion was successfully created.') }\n format.xml { render :xml => @seccion, :status => :created, :location => @seccion }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @seccion.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @veiculo = Veiculo.new(params[:veiculo])\n\n respond_to do |format|\n if @veiculo.save\n format.html { redirect_to @veiculo, :notice => 'Veiculo was successfully created.' }\n format.json { render :json => @veiculo, :status => :created, :location => @veiculo }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @veiculo.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | [
"0.6663375",
"0.65300363",
"0.6406342",
"0.6395286",
"0.6338467",
"0.63044035",
"0.6278391",
"0.6276684",
"0.6267618",
"0.62545985",
"0.62474227",
"0.6229199",
"0.6219681",
"0.6217479",
"0.62123716",
"0.62107056",
"0.6206724",
"0.6206048",
"0.619697",
"0.6191094",
"0.61886233",
"0.6183872",
"0.6161408",
"0.61571366",
"0.6131791",
"0.61151755",
"0.610261",
"0.610014",
"0.6083125",
"0.6054267",
"0.60211277",
"0.6018956",
"0.59973055",
"0.59853256",
"0.59739965",
"0.59737605",
"0.59714705",
"0.59671235",
"0.59600747",
"0.59496903",
"0.59469074",
"0.59460056",
"0.5938238",
"0.5937732",
"0.5934514",
"0.5928922",
"0.5926025",
"0.5924986",
"0.5924781",
"0.5915974",
"0.589734",
"0.5884258",
"0.587535",
"0.58751976",
"0.5873716",
"0.5864476",
"0.5863764",
"0.586168",
"0.586066",
"0.5857256",
"0.58543146",
"0.585186",
"0.5846348",
"0.58419675",
"0.5840656",
"0.582944",
"0.5825524",
"0.58198225",
"0.58137846",
"0.5813646",
"0.5809976",
"0.58054274",
"0.5804931",
"0.57940644",
"0.57854533",
"0.5776854",
"0.57659835",
"0.5761048",
"0.5760486",
"0.5757867",
"0.5755493",
"0.5739979",
"0.573317",
"0.572221",
"0.57213426",
"0.5716662",
"0.5711064",
"0.5707353",
"0.57044727",
"0.5704433",
"0.57023025",
"0.5701761",
"0.56994474",
"0.56960034",
"0.56928587",
"0.56889737",
"0.56876713",
"0.5685971",
"0.56807137",
"0.5679828"
] | 0.6659651 | 1 |
PUT /selecaos/1 PUT /selecaos/1.json | def update
@selecao = Selecao.find(params[:id])
respond_to do |format|
if @selecao.update_attributes(params[:selecao])
format.html { redirect_to @selecao, notice: 'Selecao was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @selecao.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n @seguro = Seguro.find(params[:id])\n\n respond_to do |format|\n if @seguro.update_attributes(params[:seguro])\n format.html { redirect_to @seguro, notice: 'Seguro was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @seguro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @socio_serasa.update(socio_serasa_params)\n format.html { redirect_to @socio_serasa, notice: 'Socio serasa was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @socio_serasa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put!\n request! :put\n end",
"def update\n @soiree = Soiree.find(params[:id])\n\n respond_to do |format|\n if @soiree.update_attributes(params[:soiree])\n format.html { redirect_to @soiree, notice: 'Soiree was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @soiree.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @socio = Socio.find(params[:id])\n\n respond_to do |format|\n if @socio.update_attributes(params[:socio])\n format.html { redirect_to @socio, :notice => 'Socio atualizado com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @socio.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @osoba = Osoba.find(params[:id])\n\n if @osoba.update(params[:osoba])\n head :no_content\n else\n render json: @osoba.errors, status: :unprocessable_entity\n end\n end",
"def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def update\n @estoque = Estoque.find(params[:id])\n\n respond_to do |format|\n if @estoque.update_attributes(params[:estoque])\n format.html { redirect_to @estoque, notice: 'Estoque was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @estoque.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @seguidore = Seguidore.find(params[:id])\n\n respond_to do |format|\n if @seguidore.update_attributes(params[:seguidore])\n format.html { redirect_to @seguidore, notice: 'Seguidore was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @seguidore.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @soatseguro.update(soatseguro_params)\n format.html { redirect_to @soatseguro, notice: 'Soatseguro was successfully updated.' }\n format.json { render :show, status: :ok, location: @soatseguro }\n else\n format.html { render :edit }\n format.json { render json: @soatseguro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @oase = Oasis.find(params[:id])\n\n respond_to do |format|\n if @oase.update_attributes(params[:oase])\n format.html { redirect_to @oase, notice: 'Oasis was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @oase.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update(url, data)\n RestClient.put url, data, :content_type => :json\nend",
"def update\n respond_to do |format|\n if @inventario_cosa.update(inventario_cosa_params)\n format.html { redirect_to @inventario_cosa, notice: 'Inventario cosa was successfully updated.' }\n format.json { render :show, status: :ok, location: @inventario_cosa }\n else\n format.html { render :edit }\n format.json { render json: @inventario_cosa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put(*args)\n request :put, *args\n end",
"def update\n respond_to do |format|\n if @seo.update(seo_params)\n format.html { redirect_to @seo, notice: 'Seo was successfully updated.' }\n format.json { render :show, status: :ok, location: @seo }\n else\n format.html { render :edit }\n format.json { render json: @seo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @sintoma.update(sintoma_params)\n format.html { redirect_to @sintoma, notice: 'Sintoma was successfully updated.' }\n format.json { render :show, status: :ok, location: @sintoma }\n else\n format.html { render :edit }\n format.json { render json: @sintoma.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @sinistro.update(sinistro_params)\n format.html { redirect_to @sinistro, notice: 'Sinistro was successfully updated.' }\n format.json { render :show, status: :ok, location: @sinistro }\n else\n format.html { render :edit }\n format.json { render json: @sinistro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n @salle = Salle.find(params[:id])\r\n\r\n respond_to do |format|\r\n if @salle.update_attributes(params[:salle])\r\n format.html { redirect_to @salle, notice: 'Salle was successfully updated.' }\r\n format.json { head :ok }\r\n else\r\n format.html { render action: \"edit\" }\r\n format.json { render json: @salle.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update\n @escola = Escola.find(params[:id])\n\n respond_to do |format|\n if @escola.update_attributes(params[:escola])\n format.html { redirect_to @escola, :notice => 'Escola was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @escola.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n client=Client.find_by_id params[:id]\n if client!= nil\n client.cedula=params[:cedula] ? params[:cedula]: client.cedula\n client.sector=params[:sector] ? params[:sector]: client.sector\n client.nombre=params[:nombre] ? params[:nombre]: client.nombre\n client.telefono=params[:telefono] ? params[:telefono]: client.telefono\n client.pagina=params[:pagina] ? params[:pagina]: client.pagina\n client.direccion=params[:direccion] ? params[:direccion]: client.direccion\n if client.save\n render(json: client, status: 201)\n end \n else\n render(json: client.errors, status: 404)\n end \n end",
"def update\n respond_to do |format|\n if @sekilas_info.update(sekilas_info_params)\n format.html { redirect_to @sekilas_info, notice: 'Sekilas info was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @sekilas_info.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @socio = Socio.find(params[:id])\n\n respond_to do |format|\n if @socio.update_attributes(params[:socio])\n format.html { redirect_to @socio, notice: 'Socio was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @socio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @secco.update(secco_params)\n format.html { redirect_to curso_secco_path(@curso, @secco), notice: 'Seccao was successfully updated.' }\n format.json { render :show, status: :ok, location: @secco }\n else\n format.html { render :edit }\n format.json { render json: @secco.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put(*args)\n request(:put, *args)\n end",
"def update_aos_version(args = {}) \n id = args['id']\n temp_path = \"/aosversions.json/{aosVersionId}\"\n path = temp_path\nargs.keys.each do |key|\n if (key == \"aosversionId\")\n args.delete(key)\n path = temp_path.gsub(\"{#{key}}\", id)\n end\nend\n puts \" PATH : #{path}\"\n put(path, args)\nend",
"def update\n @sezione = Sezione.find(params[:id])\n\n respond_to do |format|\n if @sezione.update_attributes(params[:sezione])\n format.html { redirect_to elencosezioni_path, notice: 'Sezione was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sezione.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @servicio = Servicio.find(params[:id])\n\n respond_to do |format|\n if @servicio.update_attributes(params[:servicio])\n format.html { redirect_to @servicio, :notice => 'Servicio was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @servicio.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @estatuto = Estatuto.find(params[:id])\n\n respond_to do |format|\n if @estatuto.update_attributes(params[:estatuto])\n format.html { redirect_to @estatuto, notice: 'Estatuto was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @estatuto.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @socio.update(socio_params)\n format.html { redirect_to @socio, notice: 'Socio modificado com sucesso.' }\n format.json { render :show, status: :ok, location: @socio }\n else\n format.html { render :edit }\n format.json { render json: @socio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n respond_to do |format|\r\n if @sivic_celula.update(sivic_celula_params)\r\n format.html { redirect_to @sivic_celula, notice: 'Registro alterado com sucesso.' }\r\n format.json { head :no_content }\r\n else\r\n format.html { render action: 'edit' }\r\n format.json { render json: @sivic_celula.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update\n @sistema = Sistema.find(params[:id])\n\n respond_to do |format|\n if @sistema.update_attributes(params[:sistema])\n format.html { redirect_to @sistema, notice: 'Sistema was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sistema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @sevice.update(sevice_params)\n format.html { redirect_to @sevice, notice: 'Sevice was successfully updated.' }\n format.json { render :show, status: :ok, location: @sevice }\n else\n format.html { render :edit }\n format.json { render json: @sevice.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n respond_to do |format|\r\n if @sivic_pessoa.update(sivic_pessoa_params)\r\n format.html { redirect_to @sivic_pessoa, notice: 'Registro alterado com sucesso.' }\r\n format.json { head :no_content }\r\n else\r\n format.html { render action: 'edit' }\r\n format.json { render json: @sivic_pessoa.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def set_soatseguro\n @soatseguro = Soatseguro.find(params[:id])\n end",
"def update\n @asesor = Asesor.find(params[:id])\n\n respond_to do |format|\n if @asesor.update_attributes(params[:asesor])\n format.html { redirect_to @asesor, notice: 'Asesor was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @asesor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def actualizacion \n fiesta.update (params[:id]) \n render json: fiesta\n end",
"def update\n respond_to do |format|\n if @sivic_discipulo.update(sivic_discipulo_params_netested)\n format.html { redirect_to @sivic_discipulo, notice: 'Registro alterado com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @sivic_discipulo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @lab_seminar.update(lab_seminar_params)\n format.html { redirect_to @lab_seminar, notice: 'Seminar was successfully updated.' }\n format.json { render :show, status: :ok, location: @lab_seminar }\n else\n format.html { render :edit }\n format.json { render json: @lab_seminar.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @serie = Serie.find(params[:id])\n\n respond_to do |format|\n if @serie.update_attributes(params[:serie])\n format.html { redirect_to(niveis_ensino_serie_url(@nivel,@serie), :notice => 'Serie atualizado com sucesso.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @serie.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @solicitud_servicio = SolicitudServicio.find(params[:id])\n\n respond_to do |format|\n if @solicitud_servicio.update_attributes(params[:solicitud_servicio])\n format.html { redirect_to @solicitud_servicio, notice: 'Solicitud servicio was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @solicitud_servicio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @salle.update(salle_params)\n format.html { redirect_to salles_url, notice: 'Salle was successfully updated.' }\n format.json { render :show, status: :ok, location: @salle }\n else\n format.html { render :edit }\n format.json { render json: @salle.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @socio.update(socio_params)\n format.html { redirect_to @socio, notice: 'Socio was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @socio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_situacao\n logger.debug \"ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ\"\n\n id_busca = params[:id]\n @os_id = params[:os_id]\n @os_tarefa = OsTarefa.find(id_busca)\n @os_tarefa.situacao=params[:situacao]\n @ordem_servico = OrdemServico.find(@os_id)\n logger.debug \"ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ\"\n\n if @os_tarefa.situacao=='REJEITADA'\n @os_tarefa.ordem_servico_pagamento= nil\n @os_tarefa.situacao=OsTarefa.situacoes[2]\n else\n @os_tarefa.ordem_servico_pagamento= @ordem_servico\n @os_tarefa.situacao=OsTarefa.situacoes[0]\n end\n @os_tarefa.save\n respond_to do |format|\n\n format.json { head :no_content }\n format.js { render :layout => false }\n\n end\n end",
"def put payload, path = \"\"\n make_request(path, \"put\", payload)\n end",
"def update\n respond_to do |format|\n if @osusume.update(osusume_params)\n format.html { redirect_to @osusume, notice: 'Osusume was successfully updated.' }\n format.json { render :show, status: :ok, location: @osusume }\n else\n format.html { render :edit }\n format.json { render json: @osusume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update options={}\n client.put(\"/#{id}\", options)\n end",
"def update\n @seccion = Seccion.find(params[:id])\n\n respond_to do |format|\n if @seccion.update_attributes(params[:seccion])\n format.html { redirect_to(@seccion, :notice => 'Seccion was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @seccion.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @seminar.update(seminar_params)\n format.html { redirect_to @seminar, notice: \"Seminar was successfully updated.\" }\n format.json { render :show, status: :ok, location: @seminar }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @seminar.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(params)\n res = @client.put(path, nil, params, \"Content-Type\" => \"application/json\")\n @attributes = res.json if res.status == 201\n res\n end",
"def update\n @sugerencia = Sugerencia.find(params[:id])\n\n respond_to do |format|\n if @sugerencia.update_attributes(params[:sugerencia])\n format.html { redirect_to @sugerencia, :notice => 'Sugerencia was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @sugerencia.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n if @spice.update(spice_params)\n head :no_content\n else\n render json: @spice.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @semestre.update(semestre_params)\n format.html { redirect_to semestres_url, notice: 'Semestre was successfully updated.' }\n format.json { render :show, status: :ok, location: @semestre }\n else\n format.html { render :edit }\n format.json { render json: @semestre.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @documentos_simposio.update(documentos_simposio_params)\n format.html { redirect_to @documentos_simposio, notice: 'Documentos simposio was successfully updated.' }\n format.json { render :show, status: :ok, location: @documentos_simposio }\n else\n format.html { render :edit }\n format.json { render json: @documentos_simposio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @sejour.update(sejour_params)\n format.html { redirect_to @sejour, notice: 'Sejour mis a jour.' }\n format.json { render :show, status: :ok, location: @sejour }\n else\n format.html { render :edit }\n format.json { render json: @sejour.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @veiculo = Veiculo.find(params[:id])\n\n respond_to do |format|\n if @veiculo.update_attributes(params[:veiculo])\n format.html { redirect_to @veiculo, :notice => 'Veiculo was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @veiculo.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @soiree.update(soiree_params)\n format.html { redirect_to @soiree, notice: 'Votre évènement a bien été mis à jour.' }\n format.json { render :show, status: :ok, location: @soiree }\n else\n format.html { render :edit }\n format.json { render json: @soiree.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_seccao\n @secco = Secco.find(params[:id])\n end",
"def update\n respond_to do |format|\n if @semestre.update(semestre_params)\n format.html { redirect_to @semestre, notice: 'Semestre was successfully updated.' }\n format.json { render :show, status: :ok, location: @semestre }\n else\n format.html { render :edit }\n format.json { render json: @semestre.errors, status: :unprocessable_entity }\n end\n end\n end",
"def suscribe\n @estate = Estate.find(params[:id])\n @estate.update_attribute(:status, true)\n respond_to do |format|\n format.html { redirect_to estates_url, notice: 'Propiedad publicada exitosamente.' }\n format.json { head :no_content }\n end\n end",
"def update\n respond_to do |format|\n if @solicitud.update(solicitud_params)\n format.html { redirect_to @solicitud, notice: 'Solicitud was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @solicitud.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @sesion = Sesion.where(entidad_paraestatal_id: @entidad_paraestatal.id).find(params[:id])\n\n respond_to do |format|\n if @sesion.update_attributes(params[:sesion])\n format.html { redirect_to [@entidad_paraestatal,@sesion], notice: 'Sesion was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sesion.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @solicitante.update(solicitante_params)\n format.html { redirect_to @solicitante, notice: 'Solicitante was successfully updated.' }\n format.json { render :show, status: :ok, location: @solicitante }\n else\n format.html { render :edit }\n format.json { render json: @solicitante.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @souvenior.update(souvenior_params)\n format.html { redirect_to @souvenior, notice: 'Souvenior was successfully updated.' }\n format.json { render :show, status: :ok, location: @souvenior }\n else\n format.html { render :edit }\n format.json { render json: @souvenior.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n @sucursale.usuarios_id = current_usuario.id\n if @sucursale.update(sucursale_params)\n format.html { redirect_to @sucursale, notice: 'Sucursal actualizada con exito!' }\n format.json { render :show, status: :ok, location: @sucursale }\n else\n format.html { render :edit }\n format.json { render json: @sucursale.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put(path, data = {})\n request 'PUT', path, body: data.to_json\n end",
"def update\n @sabio = Sabio.find(params[:id])\n\n respond_to do |format|\n if @sabio.update_attributes(params[:sabio])\n format.html { redirect_to @sabio, notice: 'El Sabio fue actualizado.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sabio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put(id, json)\n with_endpoint do |endpoint|\n url = [endpoint, @resource_name, id].compact.join('/')\n url += \"/\" \n return HTTParty.put(url, :body => json, :timeout => 4, :headers => { 'Content-Type' => 'application/json' })\n end\n end",
"def set_socio_serasa\n @socio_serasa = SocioSerasa.find(params[:id])\n end",
"def set_sesiune\n @sesiune = Sesiune.find(params[:id])\n end",
"def update\n @sitio = Sitio.find(params[:id])\n\n respond_to do |format|\n if @sitio.update_attributes(params[:sitio])\n format.html { redirect_to @sitio, notice: 'Sitio was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @sitio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def save(request)\n raise ArgumentError, \"PUT does not accept options\" unless request.options.empty?\n update(request) || create(request)\n end",
"def update\n respond_to do |format|\n if @<%= singular_table_name %>.update(<%= singular_table_name %>_params)\n format.html { redirect_to @<%= singular_table_name %>, notice: \"#{t('activerecord.models.<%= singular_table_name %>.one')} atualizado com sucesso.\" }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @<%= singular_table_name %>.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n params.permit!\n @silo = Silo.find(params[:id])\n\n respond_to do |format|\n if @silo.update_attributes(params[:silo])\n format.html { redirect_to(@silo, :notice => 'Silo was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @silo.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @estagio = Estagio.find(params[:id])\n\n respond_to do |format|\n if @estagio.update_attributes(params[:estagio])\n flash[:notice] = 'Estagio was successfully updated.'\n format.html { redirect_to(@estagio) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @estagio.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @spaethi = Spaethi.find(params[:id])\n\n respond_to do |format|\n if @spaethi.update_attributes(params[:spaethi])\n format.html { redirect_to @spaethi, notice: 'Spaethi was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @spaethi.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @solicitud.update(solicitud_params)\n format.html { redirect_to @solicitud, notice: 'Solicitud was successfully updated.' }\n format.json { render :show, status: :ok, location: @solicitud }\n else\n format.html { render :edit }\n format.json { render json: @solicitud.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @asiento = Asiento.find(params[:id])\n\n respond_to do |format|\n if @asiento.update_attributes(params[:asiento])\n format.html { redirect_to @asiento, :notice => 'El apunte fue cambiado.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @asiento.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @coordenador_estagio = CoordenadorEstagio.find(params[:id])\n\n respond_to do |format|\n if @coordenador_estagio.update_attributes(params[:coordenador_estagio])\n format.html { redirect_success(\"Coordenador alterado com sucesso!\",:coordenador_estagio, :index)}\n format.json { head :no_content }\n else\n format.html { redirect_error(\"Erro ao alterar coordenador!\",:coordenador_estagio, :index)}\n format.json { render json: @coordenador_estagio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @eou = Eou.find(params[:id])\n\n respond_to do |format|\n if @eou.update_attributes(params[:eou])\n format.html { redirect_to @eou, :notice => 'Eou was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @eou.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\r\n @asistencia = Asistencia.find(params[:id])\r\n\r\n respond_to do |format|\r\n if @asistencia.update_attributes(params[:asistencia])\r\n format.html { redirect_to @asistencia, notice: 'Asistencia was successfully updated.' }\r\n format.json { head :no_content }\r\n else\r\n format.html { render action: \"edit\" }\r\n format.json { render json: @asistencia.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update\n @ativo_outro = AtivoOutro.find(params[:id])\n\n respond_to do |format|\n if @ativo_outro.update_attributes(params[:ativo_outro])\n format.html { redirect_to @ativo_outro, notice: 'Ativo foi salvo com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ativo_outro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put(*args)\n prepare_request(:put, args)\n @@client.add(:put, @path, *args)\n end",
"def set_sesione\n @sesione = Sesione.find(params[:id])\n end",
"def update\n authorize! :update_almacen,Sigesp::Solicitud\n if @sigesp_solicitud.update(sigesp_solicitud_alamcen_params)\n return render json: { url: sigesp_solicitudsalmacen_path(@sigesp_solicitud)} \n else\n return render json:@sigesp_solicitud.errors ,status: :unprocessable_entity\n end \n end",
"def update\n @cargo_eleicao = CargoEleicao.find(params[:id])\n\n respond_to do |format|\n if @cargo_eleicao.update_attributes(params[:cargo_eleicao])\n format.html { redirect_to @cargo_eleicao, notice: 'Cargo eleicao was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @cargo_eleicao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @solicitud.update(solicitud_params)\n format.html { redirect_to @solicitud, notice: \"Solicitud was successfully updated.\" }\n format.json { render :show, status: :ok, location: @solicitud }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @solicitud.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @soa_section.update(soa_section_params.merge({serial_number: @soa_section.serial_number += 1, revision: @soa_section.revision += 1 }))\n format.html { redirect_to domain_dns_zone_path(@soa_section.dns_zone.domain, @soa_section.dns_zone), notice: 'Soa section was successfully updated.' }\n format.json { head :no_content }\n else\n format.html do\n flash[:alert] = \"SoaSection validation failed: #{@soa_section.errors.messages}\"\n render action: :edit\n end\n format.json { render json: @soa_section.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @ginasio = Ginasio.find(params[:id])\n\n respond_to do |format|\n if @ginasio.update_attributes(params[:ginasio])\n format.html { redirect_to @ginasio, :flash => { :success => 'Dados do ginasio alterados com successo!' } }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @ginasio.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @nossos_servico = NossosServico.find(params[:id])\n\n respond_to do |format|\n if @nossos_servico.update_attributes(params[:nossos_servico])\n format.html { redirect_to(@nossos_servico, :notice => 'Nossos servico was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @nossos_servico.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def put(path, params = {})\n request(:put, path, params)\n end",
"def put(path, params = {})\n request(:put, path, params)\n end",
"def put(path, params = {})\n request(:put, path, params)\n end",
"def update\n respond_to do |format|\n if @segundo.update(segundo_params)\n format.html { redirect_to @segundo, notice: 'Segundo was successfully updated.' }\n format.json { render :show, status: :ok, location: @segundo }\n else\n format.html { render :edit }\n format.json { render json: @segundo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @safra_verdoso = SafraVerdoso.find(params[:id])\n\n respond_to do |format|\n if @safra_verdoso.update_attributes(params[:safra_verdoso])\n format.html { redirect_to \"/safra_produtos/#{@safra_verdoso.safra_produto_id}/descontos\"}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @safra_verdoso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @saida.update(saida_params)\n format.html { redirect_to @saida, notice: 'Saida was successfully updated.' }\n format.json { render :show, status: :ok, location: @saida }\n else\n format.html { render :edit }\n format.json { render json: @saida.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @serie = Serie.find(params[:id])\n\n respond_to do |format|\n if @serie.update_attributes(serie_params)\n format.html { redirect_to(@serie, :notice => 'Serie was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @serie.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update \n retorno = {erro: \"322\" ,body: \"\"}\n if @usuario.update(valid_request?)\n retorno = {erro: \"000\", body: {evento_id: @usuario.id, usuario_nome: @usuario.nome}}\n end\n render json: retorno.to_json\n end",
"def update\n respond_to do |format|\n if @solicitacoes_avaliacoes_servico.update(solicitacoes_avaliacoes_servico_params)\n format.html { redirect_to @solicitacoes_avaliacoes_servico, notice: 'Solicitacoes avaliacoes servico was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @solicitacoes_avaliacoes_servico.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @seance = Seances::UseCases::Update.new.call(id: params[:id], params: seance_params)\n\n if @seance.valid?\n render jsonapi: @seance\n else\n render jsonapi_errors: @seance.errors, status: :unprocessable_entity\n end\n end",
"def put(path, body = nil, ctype = 'application/json')\n make_call(mk_conn(path, 'Content-Type': ctype,\n 'Accept': 'application/json'),\n :put, nil, body.to_json)\n end"
] | [
"0.6427689",
"0.6255403",
"0.61362725",
"0.6129032",
"0.60895616",
"0.6065918",
"0.6046543",
"0.6029601",
"0.60295624",
"0.6023039",
"0.60130554",
"0.60099554",
"0.6006212",
"0.59947383",
"0.59772074",
"0.59656155",
"0.5962535",
"0.5933297",
"0.59308934",
"0.59105206",
"0.5909519",
"0.58786964",
"0.5860092",
"0.5831077",
"0.5830613",
"0.5825473",
"0.5825004",
"0.5820966",
"0.58086354",
"0.57958066",
"0.5792555",
"0.57848793",
"0.5765346",
"0.5764775",
"0.5764636",
"0.5762301",
"0.5762301",
"0.57613295",
"0.5760737",
"0.57534695",
"0.5749193",
"0.57472205",
"0.5743609",
"0.57228106",
"0.57149035",
"0.57131284",
"0.5711638",
"0.57049894",
"0.5697415",
"0.56969213",
"0.5692629",
"0.56913054",
"0.56854224",
"0.568467",
"0.56753814",
"0.5673851",
"0.5671909",
"0.5663378",
"0.5662281",
"0.56621236",
"0.5657732",
"0.5654334",
"0.5647855",
"0.56469023",
"0.5646077",
"0.563924",
"0.56392056",
"0.5638945",
"0.56333727",
"0.56294173",
"0.5628673",
"0.56268746",
"0.5626826",
"0.562442",
"0.5624348",
"0.56182396",
"0.561195",
"0.5608791",
"0.56043327",
"0.55965126",
"0.5594577",
"0.5594564",
"0.55930966",
"0.55914307",
"0.5590671",
"0.5589651",
"0.55870074",
"0.5586733",
"0.5586404",
"0.55855393",
"0.55855393",
"0.55855393",
"0.55830437",
"0.5582428",
"0.5580351",
"0.55803025",
"0.55781317",
"0.55778956",
"0.55773586",
"0.5575988"
] | 0.634854 | 1 |
DELETE /selecaos/1 DELETE /selecaos/1.json | def destroy
@selecao = Selecao.find(params[:id])
@selecao.destroy
respond_to do |format|
format.html { redirect_to selecaos_url }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @seguro = Seguro.find(params[:id])\n @seguro.destroy\n\n respond_to do |format|\n format.html { redirect_to seguros_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @seguidore = Seguidore.find(params[:id])\n @seguidore.destroy\n\n respond_to do |format|\n format.html { redirect_to seguidores_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @escola = Escola.find(params[:id])\n @escola.destroy\n\n respond_to do |format|\n format.html { redirect_to escolas_url }\n format.json { head :no_content }\n end\n end",
"def delete\n client.delete(\"/#{id}\")\n end",
"def destroy\n @sezione = Sezione.find(params[:id])\n @sezione.destroy\n\n respond_to do |format|\n format.html { redirect_to sezioni_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @soiree = Soiree.find(params[:id])\n @soiree.destroy\n\n respond_to do |format|\n format.html { redirect_to soirees_url }\n format.json { head :no_content }\n end\n end",
"def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend",
"def destroy\n @estatuto = Estatuto.find(params[:id])\n @estatuto.destroy\n\n respond_to do |format|\n format.html { redirect_to estatutos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @soatseguro.destroy\n respond_to do |format|\n format.html { redirect_to soatseguros_url, notice: 'Soatseguro was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asignatura.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @cargo_eleicao = CargoEleicao.find(params[:id])\n @cargo_eleicao.destroy\n\n respond_to do |format|\n format.html { redirect_to cargo_eleicaos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @socio_serasa.destroy\n respond_to do |format|\n format.html { redirect_to socio_serasas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @solicitud.destroy\n respond_to do |format|\n format.html { redirect_to solicitudes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cliente.destroy\n respond_to do |format|\n format.html { redirect_to clientes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sintoma.destroy\n respond_to do |format|\n format.html { redirect_to sintomas_url, notice: 'Sintoma was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @sivic_celula.destroy\r\n respond_to do |format|\r\n format.html { redirect_to sivic_celulas_url }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @sejour.destroy\n respond_to do |format|\n format.html { redirect_to sejours_url, notice: 'Sejour was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cliente = Cliente.find(params[:id])\n @cliente.destroy\n\n respond_to do |format|\n format.html { redirect_to clientes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cliente = Cliente.find(params[:id])\n @cliente.destroy\n\n respond_to do |format|\n format.html { redirect_to clientes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cliente = Cliente.find(params[:id])\n @cliente.destroy\n\n respond_to do |format|\n format.html { redirect_to clientes_url }\n format.json { head :no_content }\n end\n end",
"def test_del\n header 'Content-Type', 'application/json'\n\n data = File.read 'sample-traces/0.json'\n post('/traces', data, 'CONTENT_TYPE': 'application/json')\n\n id = last_response.body\n\n delete \"/traces/#{id}\"\n assert last_response.ok?\n\n get \"/traces/#{id}\"\n\n contents = JSON.parse last_response.body\n assert_kind_of(Hash, contents, 'Response contents is not a hash')\n assert contents.key? 'description'\n assert(!last_response.ok?)\n end",
"def destroy\n @cliente = Cliente.find(params[:id])\n @cliente.destroy\n\n respond_to do |format|\n format.html { redirect_to clientes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @sesh.destroy\n respond_to do |format|\n format.html { redirect_to root_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ocorrencia.destroy\n respond_to do |format|\n format.html { redirect_to ocorrencias_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @prueba_json.destroy\n respond_to do |format|\n format.html { redirect_to prueba_jsons_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fulcliente = Fulcliente.find(params[:id])\n @fulcliente.destroy\n\n respond_to do |format|\n format.html { redirect_to fulclientes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @sekilas_info.destroy\n respond_to do |format|\n format.html { redirect_to sekilas_infos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sugerencia = Sugerencia.find(params[:id])\n @sugerencia.destroy\n\n respond_to do |format|\n format.html { redirect_to sugerencias_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @servicio = Servicio.find(params[:id])\n @servicio.destroy\n\n respond_to do |format|\n format.html { redirect_to servicios_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @seo.destroy\n respond_to do |format|\n format.html { redirect_to seos_url, notice: 'Seo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exura = Exura.find(params[:id])\n @exura.destroy\n\n respond_to do |format|\n format.html { redirect_to exuras_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sistema = Sistema.find(params[:id])\n @sistema.destroy\n\n respond_to do |format|\n format.html { redirect_to sistemas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @soiree.destroy\n respond_to do |format|\n format.html { redirect_to soirees_url, notice: 'Votre évènement a bien été supprimé.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @trein_consul_comercial.destroy\n respond_to do |format|\n format.html { redirect_to trein_consul_comercials_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @trabajador_seccion.destroy\n respond_to do |format|\n format.html { redirect_to trabajador_seccions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @colegio = Colegio.find(params[:id])\n @colegio.destroy\n\n respond_to do |format|\n format.html { redirect_to colegios_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sabio = Sabio.find(params[:id])\n @sabio.destroy\n\n respond_to do |format|\n format.html { redirect_to sabios_url }\n format.json { head :ok }\n end\n end",
"def destroy\r\n @sivic_contcelula.destroy\r\n respond_to do |format|\r\n format.html { redirect_to sivic_contcelulas_url }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @salle.destroy\n respond_to do |format|\n format.html { redirect_to salles_url, notice: 'Salle was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete\n render json: Alien.delete(params[\"id\"])\n end",
"def destroy\n @datosgenerale.destroy\n respond_to do |format|\n format.html { redirect_to datosgenerales_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @datos_insumos_reactivo.destroy\n respond_to do |format|\n format.html { redirect_to datos_insumos_reactivos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @segundo.destroy\n respond_to do |format|\n format.html { redirect_to segundos_url, notice: 'Segundo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @veiculo = Veiculo.find(params[:id])\n @veiculo.destroy\n\n respond_to do |format|\n format.html { redirect_to veiculos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asiento = Asiento.find(params[:id])\n @asiento.destroy\n\n respond_to do |format|\n format.html { redirect_to asientos_url }\n format.json { head :ok }\n end\n end",
"def destroy\r\n @sivic_relatorioscelula.destroy\r\n respond_to do |format|\r\n format.html { redirect_to sivic_relatorioscelulas_url }\r\n format.json { head :no_content }\r\n end\r\n end",
"def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def destroy\n @datos_estudiante.destroy\n respond_to do |format|\n format.html { redirect_to datos_estudiantes_url, notice: 'Datos estudiante was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @semestre.destroy\n respond_to do |format|\n format.html { redirect_to semestres_url, notice: 'Semestre was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @semestre.destroy\n respond_to do |format|\n format.html { redirect_to semestres_url, notice: 'Semestre was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @clientepedido = Clientepedido.find(params[:id])\n @clientepedido.destroy\n\n respond_to do |format|\n format.html { redirect_to clientepedidos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @servico_pacote.destroy\n respond_to do |format|\n format.html { redirect_to servico_pacotes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resa.destroy\n respond_to do |format|\n format.html { redirect_to resas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asos_datum.destroy\n respond_to do |format|\n format.html { redirect_to asos_data_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @respuesta = Respuesta.find(params[:id])\n @respuesta.destroy\n\n respond_to do |format|\n format.html { redirect_to respuestas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @solicitud.destroy\n respond_to do |format|\n format.html { redirect_to solicituds_url, notice: 'Solicitud was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @solicitud.destroy\n respond_to do |format|\n format.html { redirect_to solicituds_url, notice: \"Solicitud was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @clientes_servico.destroy\n respond_to do |format|\n format.html { redirect_to clientes_servicos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @safra_verdoso = SafraVerdoso.find(params[:id])\n @safra_verdoso.destroy\n\n respond_to do |format|\n format.html { redirect_to \"/safra_produtos/#{@safra_verdoso.safra_produto_id}/descontos\"}\n format.json { head :no_content }\n end\n end",
"def destroy\n @inventario_cosa.destroy\n respond_to do |format|\n format.html { redirect_to inventario_cosas_url, notice: 'Inventario cosa was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @curso.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @socio.destroy\n respond_to do |format|\n format.html { redirect_to socios_url, notice: 'Socio removido com sucesso.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @substancia.destroy\n respond_to do |format|\n format.html { redirect_to substancias_url, notice: 'Substancia was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete\n unless possui_acesso?()\n return\n end\n @aviso = Aviso.find(params[:id])\n @aviso.destroy\n\n respond_to do |format|\n format.html { redirect_to avisos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gestacao.destroy\n respond_to do |format|\n format.html { redirect_to gestacaos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sla.destroy\n respond_to do |format|\n format.html { redirect_to slas_url, notice: 'Sla fue exitosamente destruido.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @producto_seccion.destroy\n respond_to do |format|\n format.html { redirect_to producto_seccions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @nominee.destroy\n respond_to do |format|\n format.html { redirect_to nominees_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sinistro.destroy\n respond_to do |format|\n format.html { redirect_to sinistros_url, notice: 'Sinistro was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @chaine = Chaine.find(params[:id])\n @chaine.destroy\n\n respond_to do |format|\n format.html { redirect_to chaines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @humanidades1 = Humanidades1.find(params[:id])\n @humanidades1.destroy\n\n respond_to do |format|\n format.html { redirect_to humanidades1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @teste_anamnese.destroy\n respond_to do |format|\n format.html { redirect_to teste_anamneses_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @solicitante.destroy\n respond_to do |format|\n format.html { redirect_to solicitantes_url, notice: 'Solicitante was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @sivic_alunoaula.destroy\r\n respond_to do |format|\r\n format.html { redirect_to sivic_alunoaulas_url }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @detalle = Detalle.find(params[:id])\n @detalle.destroy\n\n respond_to do |format|\n format.html { redirect_to detalles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asociado = Asociado.find(params[:id])\n @asociado.destroy\n\n respond_to do |format|\n format.html { redirect_to asociados_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @sinh_vien = SinhVien.find(params[:id])\n @sinh_vien.destroy\n\n respond_to do |format| \n format.json { head :no_content }\n end\n end",
"def destroy\n @sivic_discipulo.destroy\n respond_to do |format|\n format.html { redirect_to sivic_discipulos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sotrudniki = Sotrudniki.find(params[:id])\n @sotrudniki.destroy\n\n respond_to do |format|\n format.html { redirect_to sotrudnikis_url }\n format.json { head :no_content }\n end\n end",
"def delete(path)\n RestClient.delete request_base+path\n end",
"def delete_json(path)\n url = [base_url, path].join\n resp = HTTParty.delete(url, headers: standard_headers)\n parse_json(url, resp)\n end",
"def destroy\n @asignatura = Asignatura.find(params[:id])\n @asignatura.destroy\n\n respond_to do |format|\n format.html { redirect_to asignaturas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asignatura = Asignatura.find(params[:id])\n @asignatura.destroy\n\n respond_to do |format|\n format.html { redirect_to asignaturas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tapioca.destroy\n respond_to do |format|\n format.html { redirect_to tapiocas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sitemenu.destroy\n respond_to do |format|\n format.html { redirect_to sitemenus_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @testis = Teste.find(params[:id])\n @testis.destroy\n\n respond_to do |format|\n format.html { redirect_to testes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @coisa = Coisa.find(params[:id])\n @coisa.destroy\n\n respond_to do |format|\n format.html { redirect_to coisas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tipoapreensao.destroy\n respond_to do |format|\n format.html { redirect_to tipoapreensoes_url }\n format.json { head :no_content }\n end\n end",
"def delete path\n make_request(path, \"delete\", {})\n end",
"def delete\n render :json => @fiestas.delete_at(params[:id].to_i)\n end",
"def destroy\n @caixa = Caixa.find(params[:id])\n @caixa.destroy\n\n respond_to do |format|\n format.html { redirect_to caixas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @solicitud_servicio = SolicitudServicio.find(params[:id])\n @solicitud_servicio.destroy\n\n respond_to do |format|\n format.html { redirect_to solicitudes_servicios_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cliente.destroy\n respond_to do |format|\n format.html { redirect_to clientes_url, notice: 'Cliente removido com sucesso.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @documentos_simposio.destroy\n #respond_to do |format|\n # format.html { redirect_to documentos_simposios_url, notice: 'Documentos simposio was successfully destroyed.' }\n # format.json { head :no_content }\n #end\n end",
"def destroy\n @odontologia1 = Odontologia1.find(params[:id])\n @odontologia1.destroy\n\n respond_to do |format|\n format.html { redirect_to odontologia1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @unidad.destroy\n respond_to do |format|\n format.html { redirect_to unidades_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sindicato.destroy\n respond_to do |format|\n format.html { redirect_to sindicatos_url, notice: 'Sindicato fue eliminado exitosamente.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @simulado.destroy\n respond_to do |format|\n format.html { redirect_to simulados_url, notice: 'Simulado was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @uchronia = Uchronia.find(params[:id])\n @uchronia.destroy\n\n respond_to do |format|\n format.html { redirect_to uchronias_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @estadia.destroy\n respond_to do |format|\n format.html { redirect_to estadia_url, notice: 'Estadia was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] | [
"0.71521413",
"0.7094811",
"0.70051724",
"0.69976777",
"0.69643164",
"0.6921676",
"0.6911437",
"0.6901799",
"0.6891904",
"0.6860565",
"0.683041",
"0.6826397",
"0.68180317",
"0.68075925",
"0.68063647",
"0.68016326",
"0.68003345",
"0.67889255",
"0.67889255",
"0.67889255",
"0.6785692",
"0.6776927",
"0.6773999",
"0.677348",
"0.67694676",
"0.67677385",
"0.6761245",
"0.67572707",
"0.6751306",
"0.67492115",
"0.6746993",
"0.674508",
"0.6733378",
"0.6731346",
"0.67275095",
"0.67270863",
"0.67268705",
"0.6719632",
"0.6716234",
"0.6713474",
"0.67082256",
"0.6707381",
"0.67069346",
"0.67006403",
"0.6695907",
"0.66945064",
"0.669302",
"0.6692847",
"0.66908294",
"0.66908294",
"0.66900575",
"0.6688865",
"0.668877",
"0.6688445",
"0.66869277",
"0.6685488",
"0.66851765",
"0.668005",
"0.66739553",
"0.6673423",
"0.6671031",
"0.66683453",
"0.6667024",
"0.6664506",
"0.66629726",
"0.6660353",
"0.66598177",
"0.66587234",
"0.6653659",
"0.66514826",
"0.6649336",
"0.66488564",
"0.66485524",
"0.6648519",
"0.6647266",
"0.6646515",
"0.664448",
"0.6643271",
"0.66432196",
"0.664264",
"0.66406155",
"0.6638993",
"0.6638993",
"0.6638478",
"0.66374344",
"0.66374075",
"0.6636008",
"0.66316646",
"0.66286767",
"0.662782",
"0.6627175",
"0.66255105",
"0.662546",
"0.66173595",
"0.66125464",
"0.6610242",
"0.661005",
"0.6605756",
"0.6604781",
"0.6604128"
] | 0.71586585 | 0 |
events [array] of objects [Google::Apis::CalendarV3::Event] | def call(events)
events.each { |event| Rails.configuration.gcal_service.insert_event('primary', event) }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _convert_events(events)\n g_events = []\n events.each do |event|\n source = Google::Apis::CalendarV3::Event::Source.new(\n title: @cfg['calendar']['eventSource']['title'],\n url: @cfg['calendar']['eventSource']['url'],\n )\n g_event = Google::Apis::CalendarV3::Event.new(\n summary: event[:summary],\n location: event[:location],\n description: event[:description],\n start: _convert_to_calendar_time(event[:time]),\n end: _convert_to_calendar_time(event[:ends]),\n source: source,\n )\n g_events << g_event\n end\n g_events\n end",
"def events\n response = self.class.get('/v1/events.json')\n response.code == 200 ? JSON.parse(response.body) : nil\n end",
"def fetch_events\n params = {'calendarId' => CONFIG[:cal_id], \n 'orderBy' => 'startTime',\n #'timeMax' => Time.utc(CONFIG[:year].to_i + 1, 4, 1).iso8601, \n #'timeMin' => Time.utc(CONFIG[:year].to_i, 4, 1).iso8601,\n 'singleEvents' => 'True'}\n \n result = @client.execute(:api_method => @cal.events.list, :parameters => params)\n\n @events_list = []\n result.data.items.each do |item|\n @events_list << item\n end\n end",
"def events\n @events ||= []\n @events\n end",
"def events\n results = @client.execute(\n :api_method => @calendar.events.list,\n :authenticated => false,\n :parameters => {\n 'calendarId' => @config[:calendar_id],\n 'fields' => 'items(start,end,summary)',\n 'singleEvents' => true,\n 'orderBy' => 'startTime',\n 'timeMin' => DateTime.now.to_s,\n 'timeMax' => (DateTime.now + 7).to_s,\n 'q' => 'LIVE'\n }\n )\n\n results.data.items.map do |event|\n summary = event.summary.gsub(/^LIVE:\\s+/, '')\n CalendarEvent.new(summary, event.start.date_time, event.end.date_time)\n end\n end",
"def events(param = nil)\n request = new_request Net::HTTP::Report do |request|\n request.body = CalendarQuery.new.event(param).to_xml\n end\n response = perform_request request\n \n events = []\n \n body = Nokogiri::XML.parse(response.body)\n namespaces = { 'dav' => \"DAV:\", 'caldav' => 'urn:ietf:params:xml:ns:caldav' }\n \n body.search(\"./dav:multistatus/dav:response\", namespaces).each do |element|\n calendar_data = element.search(\"./dav:propstat/dav:prop/caldav:calendar-data\", namespaces)\n calendar = Icalendar::Parser.new(calendar_data.text).parse.first\n calendar.events.each do |event|\n event.caldav = {\n :etag => element.search(\"dav:propstat/dav:prop/dav:getetag\", namespaces).text, \n :href => element.search(\"dav:href\", namespaces).text\n }\n events += calendar.events\n end\n end\n \n events\n end",
"def events\n results = @cal_service.list_events(\n @calendar_id,\n order_by: 'startTime',\n q: 'LIVE',\n single_events: true,\n time_max: (DateTime.now + 7).to_s,\n time_min: DateTime.now.to_s,\n fields: 'items(start,end,summary)',\n )\n\n results.data.items.map do |event|\n summary = event.summary.gsub(/^LIVE:\\s+/, '')\n CalendarEvent.new(summary, event.start.date_time, event.end.date_time)\n end\n end",
"def events #:nodoc:\n components Icalendar::Vevent\n end",
"def events(events, opts = {})\n data, _status_code, _headers = events_with_http_info(events, opts)\n return data\n end",
"def extract_events(calendar)\n events = []\n rep_until = Date.today >> MONTHS_ADVANCE_FOR_REPATING\n\n calendar.events.each do |event_entry|\n # for performance reasons, create event once and then clone per occurrence\n event = Event.build_from(Adapter.new(event_entry))\n occurences = event_entry.occurrences(:before => rep_until) \n # again, for performance and api limit reasons, get geo coordinates for repeating\n # events only once\n event.get_geo_coordinates #if occurences.size > 1\n \n occurences.each do |ical_event|\n adapter = Adapter.new(ical_event)\n occurrence = event.clone\n occurrence.starts_at = adapter.starts_at\n occurrence.ends_at = adapter.ends_at\n events << occurrence\n end\n end\n \n @log.info \"got #{events.size} events\"\n events\n end",
"def events=(value)\n @events = value\n end",
"def events(*events)\n @events ||= nil\n @events = events if events.present?\n @events\n end",
"def events_for_objects(calendar_events)\n groups = calendar_events.group_by { |r| r.attributes.slice(*GROUP_ATTRIBS) }\n groups.map do |_, members|\n Event.new(basic_event_attribs(members[0]).merge(\n location: members.map(&:location_name).join(\" + \")\n ))\n end\n end",
"def calendar_events(calendar_id)\n records \"/calendars/#{calendar_id}/calendar_events.xml\", :method => :get\n end",
"def get_events\n Resources::Event.parse(request(:get, \"Events\"))\n end",
"def fetch_events(calendar_id)\n response = service.list_events(calendar_id,\n max_results: 5,\n single_events: true,\n order_by: \"startTime\",\n time_min: Time.now.iso8601,\n time_max: Date.today.+(1).to_time.iso8601)\n\n # filter out any declined events – they normally represent a clash or room release\n response.items.reject { |event|\n next if event.attendees.nil?\n event.attendees.all? { |attendee| attendee.response_status == \"declined\" }\n }\n end",
"def upcoming_events(calendar)\n raise(ArgumentError, \"A calendar is required.\") unless \n calendar.respond_to?(:events)\n events = []\n now=Time.now\n calendar.events.each do |event|\n if event.recurrence_id\n # Do not collect explicit recurrences, instead\n # enumerate them with RiCal in the next `elsif` clause.\n elsif event.recurs?\n events.concat(\n event.occurrences(\n starting: now - 2.hours, \n count: EVENT_COUNT_LIMIT\n )\n )\n else\n events << event\n end\n end\n events = events.select {|event| event.dtend > now }\n events = events.sort_by {|event| event.dtstart }\n events\n end",
"def get_events\n events = [] \n @log ||= Rails.logger\n Appsterdam::Application.ical_subscriptions.each do |options|\n @log.info \"getting events from #{options[:url]}\"\n components = parse_ical(options[:url])\n events.concat(extract_events(components.first))\n end\n @log.info \"done importing iCal events.\"\n \n events\n end",
"def event_list\n @_events\n end",
"def org_events\n @org_events ||= orgs.map { |o| o.events }.flatten\n end",
"def events\n data[\"events\"]\n end",
"def all_events\n event_types.map { |etype| events(etype) }\n end",
"def events\n @events ||= parsed_json.map do |json_record|\n VoyagerUpdater::Event.new(json_record)\n end\n end",
"def events\n\t\t\t\treturn @events.values unless (@events.nil?)\n\t\t\t\treturn nil\n\t\t\tend",
"def events\n @events = registered_application.events.group_by(&:name)\n end",
"def get_events()\n\t\tevents = []\n\n\t\t@DB[:events].order(:date).each do |event|\n\t\t\tnewTimeslots = []\n\t\t\t@DB[:timeslots].where(parent_table: 'events', parent_id: event[:'id']).each do |timeslot|\n\t\t\t\tnewTimeslots.push(DateTime.parse(timeslot[:'time']))\n\t\t\tend\n\n\t\t\tnewAttendees = []\n\t\t\t@DB[:attendees].where(parent_id: event[:'id']).each do |attendee|\n\t\t\t\tattendeeTimeslots = []\n\t\t\t\t@DB[:timeslots].where(parent_table: 'attendees', parent_id: attendee[:'id']).each do |timeslot|\n\t\t\t\t\tattendeeTimeslots.push(DateTime.parse(timeslot[:'time']))\n\t\t\t\tend\n\n\t\t\t\tnewAttendee = Attendee.new(attendee[:'name'], attendeeTimeslots)\n\t\t\t\tnewAttendees.push(newAttendee)\n\t\t\tend\n\n\t\t\tnewEvent = Event.new(event[:'name'], event[:'description'], newTimeslots, newAttendees, event[:'id'])\n\t\t\tif newEvent.get_date >= @epoch\n\t\t\t\tevents.push(newEvent)\n\t\t\tend\n\t\tend\n\n\t\treturn events\n\tend",
"def get_events()\n @client.make_request(:get, @client.concat_user_path(\"#{CALL_PATH}/#{id}/events\"))[0]\n end",
"def all_events\n @events = Event.all\n end",
"def events\n @events ||= Schedule.new\n end",
"def events\n @events ||= user_events.concat(org_events).uniq\n end",
"def events_now\n\n # Open empty array\n events = []\n\n # Loop over all the calendars\n CALENDARS.each do |name, id|\n\n # Get the events\n cal_events = calendar(id).find_events_in_range(Time.now, Time.now + 60)\n\n # Loop over each one and add it to the array\n cal_events.each do |e|\n events << {event: e, cal: name} unless e.nil?\n end\n\n end\n\n # Return the final list\n return events\n\nend",
"def fetch_events(app_name, env_name, options={})\n @event_fetched_times += 1\n set_env_ready(app_name, env_name, true) # assume env become ready after it spit out all the events\n\n unless @events # unrestricted mode for testing if no explicit events set\n return [generate_event_from_messages(['Successfully deployed new configuration to environment',\n 'terminateEnvironment completed successfully',\n 'Successfully launched environment',\n 'Completed swapping CNAMEs for environments'\n ], Time.now + @event_fetched_times), nil]\n end\n\n events = @events[env_key(app_name, env_name)][@event_fetched_times - 1]\n\n if options.has_key?(:start_time)\n start_time = Time.parse(options[:start_time])\n events = events.select { |e| e[:event_date] >= start_time }\n end\n\n if limit = options[:max_records]\n events = events[0..limit]\n end\n\n [events, nil]\n end",
"def events\n @events ||= Array(context[:events]).reverse.map { |event| Concierge::SafeAccessHash.new(event) }\n end",
"def events\n data.events\n end",
"def fullcalendar_events_json\n events.map do |event|\n {\n id: event.id.to_s,\n title: event.name,\n start: event.starts_at.strftime('%Y-%m-%d %H:%M:%S'),\n end: event.ends_at.strftime('%Y-%m-%d %H:%M:%S'),\n allDay: event.all_day,\n url: event_path(event)\n }\n end\n end",
"def events(args = nil)\n events = if args.nil? || args.eql?(:all)\n events = decode_response connection.get \"/calendar/feeds/#{id}/private/full\"\n events = events[\"feed\"][\"entry\"]\n events.map{ |event| Event.build_event(event, self)}\n elsif args.is_a?(String)\n Event.new({:id => args, :calendar => self}).fetch\n elsif args.is_a?(Hash)\n if args.is_a?(Hash) && args.has_key?(:id)\n Event.new({:id => args[:id], :calendar => self}).fetch\n else\n params = { \"start-min\" => args[:from],\n \"start-max\" => args[:to]}\n events = decode_response connection.get \"/calendar/feeds/#{id}/private/full\", params\n events = events[\"feed\"][\"entry\"]\n events = events.nil? ? [] : events.map{ |event| Event.build_event(event, self)}\n end\n else\n raise ArgumentError.new \"Invalid argument type #{args.class}\"\n end\n\n end",
"def events(service, url, args)\n events = []\n ret = service.send_request(GData4Ruby::Request.new(:get, url, nil, nil, args))\n REXML::Document.new(ret.body).root.elements.each(\"entry\"){}.map do |entry|\n entry = GData4Ruby::Utils.add_namespaces(entry)\n e = GCal4Ruby::Event.new(service)\n if e.load(entry.to_s)\n events << e\n end\n end\n return events\nend",
"def request_events(access_token, my_email, calendar_id, calendar_zone)\n service = build_service(access_token)\n\n # Return each google api calendar as an ActiveRecord Calendar model\n events = get_calendar_events(service, calendar_id).map do |item|\n upsert_service_event_item(my_email, calendar_zone, item)\n end\n\n # upsert_service_event_item sometimes returns nils, when an event doesn't\n # get made\n events.reject(&:nil?)\n end",
"def search_for_future_calendar_events\n uri = URI.parse(\"https://www.googleapis.com/calendar/v3/calendars/ufbobbo%40gmail.com/events?orderBy=startTime&singleEvents=true&timeMin=#{Time.now.strftime(\"%FT%T%:z\")}&fields=items(id%2Cstart)&key=#{ENV['GOOGLE_API_KEY']}\")\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n request = Net::HTTP::Get.new(uri.request_uri)\n response = http.request(request).body\n end",
"def index\n @events = Event.order(:time).order(:date)\n \n fetch_calendar 'tgbgmclhk5gegn8t95fvqov0s8@group.calendar.google.com'\n end",
"def download_events(apiEngine)\n @events = []\n results = []\n results = apiEngine.client.execute!(\n :api_method => apiEngine.api.events.list,\n :parameters => {\n :calendarId => @ident,\n :singleEvents => true,\n :orderBy => 'startTime',\n :timeMin => @dateMin.iso8601,\n :timeMax => @dateMax.iso8601 })\n\n results.data.items.each do |event|\n if event.start.date_time\n fulldate = event.start.date_time.strftime(\"%B, %d, %Y\")\n month = fulldate.split(',')[0]\n day = fulldate.split(',')[1]\n year = fulldate.split(',')[2]\n else\n fulldate = event.start.date.to_s\n month = fulldate.split('-')[1].to_i\n month = I18n.t(\"date.month_names\")[month]\n day = fulldate.split('-')[2]\n year = fulldate.split('-')[0]\n end\n @events.push(Event.new(event.creator.email, year, month, day, :summary => event.summary))\n end\n end",
"def index\n @events = @calendar.events.all\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @events }\n end\n end",
"def venue_events venue_id\n response = get(\"/venues/#{venue_id}/events\")[\"response\"]\n @events = response[\"events\"]\n @events[\"items\"].map!{|item| Foursquared::Response::Event.new(self, item)}\n @events\n end",
"def get_events\n response = request(:get, \"/devmgr/v2/events\")\n #status(response, 200, 'Failed to get current events from server')\n #JSON.parse(response.body)\n response\n end",
"def fetch_valid_test_events\n Event::NAME_CONFIG.each do |name, e_data|\n\n @client_webhook_setting.event_sources_array.each do |event_source|\n @client_webhook_setting.event_result_types_array.each do |result_type|\n\n next if result_type != e_data[:result_type] ||\n e_data[:inavlid_source].include?(event_source)\n\n event_data = GlobalConstant::Event.send(\"#{name}_event_data\")\n\n if event_source == GlobalConstant::Event.kyc_system_source\n event_data[:user_kyc_detail][:last_acted_by] = Admin::AUTO_APPROVE_ADMIN_ID\n end\n\n event = {\n client_id: @client_id,\n event_source: event_source,\n event_name: name,\n event_data: event_data,\n event_timestamp: Time.now.to_i,\n\n client_webhook_setting_id: @webhook_id,\n lock_id: lock_id\n }\n\n @valid_events[name] ||= []\n @valid_events[name] << event\n end\n end\n end\n end",
"def event_all(stack, evt_id = nil)\n evt_id = stack.last_event_token if evt_id\n results = all_result_pages(evt_id, :body,\n \"DescribeStackEventsResponse\", \"DescribeStackEventsResult\",\n \"StackEvents\", \"member\") do |options|\n request(\n :method => :post,\n :path => \"/\",\n :form => options.merge(\n \"Action\" => \"DescribeStackEvents\",\n \"StackName\" => stack.id,\n ),\n )\n end\n events = results.map do |event|\n stack.last_event_token = event[\"NextToken\"] if event[\"NextToken\"]\n Stack::Event.new(\n stack,\n :id => event[\"EventId\"],\n :resource_id => event[\"PhysicalResourceId\"],\n :resource_name => event[\"LogicalResourceId\"],\n :resource_logical_id => event[\"LogicalResourceId\"],\n :resource_state => event[\"ResourceStatus\"].downcase.to_sym,\n :resource_status => event[\"ResourceStatus\"],\n :resource_status_reason => event[\"ResourceStatusReason\"],\n :time => Time.parse(event[\"Timestamp\"]).localtime,\n ).valid_state\n end\n if evt_id\n idx = events.index { |d| d.id == evt_id }\n idx ? events.slice(0, idx) : events\n else\n events\n end\n end",
"def index\n @calender_events = CalenderEvent.all\n end",
"def events\n return @events\n end",
"def events\n collection(\"events\")\n end",
"def index\n @calendar_events = CalendarEvent.page(params[:page])\n end",
"def get_next_calendar_events\n next_events = JSON.parse search_for_future_calendar_events\n next_events[\"items\"]\n end",
"def events\n @events ||= {}\n end",
"def events(project_id, options = {})\n get \"projects/#{project_id}/events\", options\n end",
"def events\n client = Signet::OAuth2::Client.new(client_options)\n client.update!(session[:authorization])\n\n service = Google::Apis::CalendarV3::CalendarService.new\n service.authorization = client\n #controllo se non ho mai loggato con google\n if(!session[:authorization])\n client = Signet::OAuth2::Client.new(client_options)\n redirect_to client.authorization_uri.to_s\n else\n #controllo se il token è scaduto\n response = Net::HTTP.get(URI.parse('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token='+(session[:authorization].first[1])))\n if(response.split[2][1,13] == \"invalid_token\")\n client =●●●●●● Signet::OAuth2::Client.new(client_options)\n redirect_to client.authorization_uri.to_s\n else\n @event_list = service.list_events(params[:calendar_id])\n end\n end\n end",
"def list_events(criteria = {})\n events(criteria: criteria)\n end",
"def buildEvents(events_json_response)\n\t\tevents = Array.new\n\t\t\n\t\tevents_json_response[\"results\"].each do |result|\n\t\t\tevent = {}\n\t\t\tevent[:name] = result[\"event_name\"]\n\t\t\tevent[:event_location] = \"#{result[\"neighborhood\"]}, #{result[\"street_address\"]}\"\n\t\t\tevent[:description] = result[\"web_description\"]\n\n\t\t\tarr = getCoordinates(\"#{result[\"neighborhood\"]}, #{result[\"street_address\"]}\")\n\n\t\t\tevent[:event_latitude] = arr[0] # lat\n\t\t\tevent[:event_longitude] = arr[1] # lon\n\t\t\n\n\t\t\t\n\t\t\tevents << event\n\t\t\t# Event.create!(events)\n\t\tend\t\n\n\t\tevents\n\n\tend",
"def events(criteria: {}, tenants: nil)\n query = generate_query_params(criteria)\n uri = tenants ? '/admin/events' : '/events'\n http_get(uri + query, multi_tenants_header(tenants)).map { |e| Event.new(e) }\n end",
"def events_request(next_page_token = nil)\n params = {:calendarId => calendar_id}\n params.merge!(:pageToken => next_page_token) if next_page_token\n\n JSON.parse(client.execute(\n :api_method => calendar.events.list,\n :parameters => params).response.body)\n end",
"def events\n Enumerator.new(self,:each_event).to_a\n end",
"def events\n @events ||= event_finder\n end",
"def upcoming_events(order_by: self.class::START_OLDEST_FIRST,\n status: self.class::ALL)\n EventbriteSDK::ResourceList.new(\n url_base: \"#{path}/events\",\n object_class: EventbriteSDK::Event,\n key: 'events',\n query: {\n order_by: order_by,\n status: status\n }\n )\n end",
"def add_to_calendar(events)\n events.each do |event|\n @cal.add_event(event) unless should_ignore(event)\n end\n end",
"def event_instances(calendar_id, event_id)\n end",
"def calendars\n page_token = nil\n result = execute(:api_method => service.calendar_list.list)\n entries = []\n while true\n entries += result.data.items\n if !(page_token = result.data.next_page_token)\n break\n end\n result = execute(:api_method => service.calendar_list.list,\n :parameters => {'pageToken' => page_token})\n end\n\n entries\n end",
"def day_events(date, events)\n events.select { |e| e.start_time.to_date == date }\n end",
"def day_events(date, events)\n events.select { |e| e.start_time.to_date == date }\n end",
"def events(type: nil)\n name = @resource.dig_fetch(:metadata, :name)\n namespace = @resource.dig(:metadata, :namespace)\n selector = [\n \"involvedObject.name=#{name}\",\n \"involvedObject.kind=#{@kind}\",\n ]\n selector << \"involvedObject.uid=#{@pod.uid}\" if @pod\n selector << \"type=#{type}\" if type\n SamsonKubernetes.retry_on_connection_errors do\n events = @client.get_events(\n namespace: namespace,\n field_selector: selector.join(\",\")\n ).fetch(:items)\n\n # ignore events from before the deploy, comparing strings for speed\n events.select! { |e| last_timestamp(e) >= @start }\n\n # https://github.com/kubernetes/kubernetes/issues/29838\n events.sort_by! { |e| last_timestamp(e) }\n\n events\n end\n rescue *SamsonKubernetes.connection_errors => e\n # similar to kubernetes/resource.rb error handling\n error_location = \"#{name} #{namespace} #{@deploy_group.name}\"\n raise Samson::Hooks::UserError, \"Kubernetes error #{error_location}: #{e.message}\"\n end",
"def events\n events = []\n now = Time.now\n @countdowns.times.each do |id, show_time|\n # Prevents extra refreshes\n latest_time = show_time.latest\n events << CalendarEvent.new(\n Shows.find_show(id.to_s).title,\n latest_time,\n latest_time + (60*60*3)\n ) unless (latest_time - now) > (60*60*24*7)\n end\n events.sort! {|a,b| a.start_time <=> b.start_time}\n end",
"def index\n @calendars = Event.all\n end",
"def get_events\n @doc.css(\"#cal-event-entry\")\n end",
"def project_events(project_id, params = {})\n make_get_request(\"/projects/#{project_id}/events\", params)\n end",
"def all(params = {})\n req = WebPay::EventListRequest.create(params)\n raw_response = @client._request(:get, 'events', req)\n WebPay::EventResponseList.new(raw_response)\n end",
"def events\n event_enum\n end",
"def events\n event_enum\n end",
"def events\n requires :label, :application_name\n service.events.all({\n 'ApplicationName' => application_name,\n 'VersionLabel' => label\n })\n end",
"def calendars\n records 'calendar', '/calendars.xml', :method => :get\n end",
"def events(options = {})\n @events ||= {}\n @events[options.to_s] ||= post('facebook.events.get', options) do |response|\n response.map do |hash|\n Event.from_hash(hash)\n end\n end\n end",
"def available_events\n\t\treturn current_room.events || []\n\tend",
"def events\n event_lookup()\n end",
"def events\n event_lookup()\n end",
"def events_for_venue\n @events = []\n\n if params[:venue_id]\n @events = Event.where(venue_id: params[:venue_id]).eager_load(:dates).merge(EventDate.not_finished)\n end\n\n end",
"def events\n return if @events.empty?\n @events.uniq.sort\n end",
"def event_collections(project_id)\n resource \"projects/#{project_id}/events\"\n end",
"def find_events(event)\n events = []\n name = event.name\n category = event.category\n description = event.description\n snapshots = find_project_snapshots(event.snapshot_id)\n snapshots.each do |snapshot|\n snapshot.events.reject { |e| e.name!=name || e.category!=category }.each do |event|\n events << event\n end\n end\n events\n end",
"def events\n @events ||= RubyEvents::Events.new(self)\n end",
"def events\n @finity.events.map { |name, _| name }\n end",
"def event_list\n events_list\n end",
"def events\n lfm_path = \"artist.getevents&artist=#{@name}\"\n lfm_data = LastFm::fetch_data(lfm_path)\n return Event.create_from_hash(Hash.from_xml(lfm_data)['lfm']['events']['event']) \n end",
"def event_all(stack, marker = nil)\n params = marker ? {:marker => marker} : {}\n result = request(\n :path => \"/stacks/#{stack.name}/#{stack.id}/events\",\n :method => :get,\n :expects => 200,\n :params => params\n )\n result.fetch(:body, :events, []).map do |event|\n Stack::Event.new(\n stack,\n :id => event[:id],\n :resource_id => event[:physical_resource_id],\n :resource_name => event[:resource_name],\n :resource_logical_id => event[:logical_resource_id],\n :resource_state => event[:resource_status].downcase.to_sym,\n :resource_status => event[:resource_status],\n :resource_status_reason => event[:resource_status_reason],\n :time => Time.parse(event[:event_time])\n ).valid_state\n end\n end",
"def events=(*events)\n @events = events.flatten.uniq\n end",
"def get_calendars\r\n http = Net::HTTP.new(@google_url, 80)\r\n response, data = http.get(\"http://#{@google_url}/calendar/feeds/\" + @user_id, @headers)\r\n case response\r\n when Net::HTTPSuccess, Net::HTTPRedirection\r\n redirect_response, redirect_data = http.get(response['location'], @headers)\r\n case response\r\n when Net::HTTPSuccess, Net::HTTPRedirection\r\n doc = REXML::Document.new redirect_data\r\n \t doc.elements.each('//entry')do |e|\r\n \t title = e.elements['title'].text\r\n \t url = e.elements['link'].attributes['href']\r\n \t @calendars << GCalendar.new(title, url.sub!(\"http://#{@google_url}\",''))\r\n \t end\r\n return redirect_response\r\n else\r\n response.error!\r\n end\r\n else\r\n response.error!\r\n end\r\n end",
"def all_events\n events.keys\n end",
"def user_events\n @user_events ||= users.map { |u| u.events }.flatten\n end",
"def events\n metadata['events'].sort_by! { |event| event['timestamp'] }\n end",
"def upcoming_events\n [] || Event.where('event_start_date > :date AND event_end_date > :date AND status = :status', {date: Time.now.strftime('%Y-%m-%d'), status: Event.statuses.ready}).limit(3)\n end",
"def events(service_id, options = {})\n response = JSON.parse(@client.get(\"/api/v1/services/#{service_id}/events\", options).body)\n return response[\"events\"] || response\n end",
"def get_events_get(startdate,\r\n events = nil,\r\n sort = nil,\r\n enddate = nil,\r\n offset = 0,\r\n limit = 10,\r\n subject = nil,\r\n xapiheader = nil,\r\n fromaddress = nil,\r\n email = nil)\r\n # Prepare query url.\r\n _path_url = '/events'\r\n _query_builder = Configuration.base_uri.dup\r\n _query_builder << _path_url\r\n _query_builder = APIHelper.append_url_with_query_parameters(\r\n _query_builder,\r\n {\r\n 'startdate' => startdate,\r\n 'events' => events,\r\n 'sort' => sort,\r\n 'enddate' => enddate,\r\n 'offset' => offset,\r\n 'limit' => limit,\r\n 'subject' => subject,\r\n 'xapiheader' => xapiheader,\r\n 'fromaddress' => fromaddress,\r\n 'email' => email\r\n },\r\n array_serialization: Configuration.array_serialization\r\n )\r\n _query_url = APIHelper.clean_url _query_builder\r\n # Prepare and execute HttpRequest.\r\n _request = @http_client.get(\r\n _query_url\r\n )\r\n CustomHeaderAuth.apply(_request)\r\n _context = execute_request(_request)\r\n # Validate response against endpoint and global error codes.\r\n if _context.response.status_code == 400\r\n raise APIException.new(\r\n 'API Response',\r\n _context\r\n )\r\n elsif _context.response.status_code == 401\r\n raise APIException.new(\r\n 'API Response',\r\n _context\r\n )\r\n elsif _context.response.status_code == 403\r\n raise APIException.new(\r\n 'API Response',\r\n _context\r\n )\r\n elsif _context.response.status_code == 405\r\n raise APIException.new(\r\n 'Invalid input',\r\n _context\r\n )\r\n end\r\n validate_response(_context)\r\n # Return appropriate response type.\r\n _context.response.raw_body\r\n end",
"def events\n @events ||= LIFECYCLE_EVENTS.each_with_object({}) { |e, a| a[e] = [] }\n end",
"def user_events(user_events_array)\n \tuser_events_array.map do |user_event|\n \t\tuser_event.event\n \tend\n end",
"def save_events(events)\n events.each do |e|\n new_event = Event.create(e)\n end\n end",
"def events\n subcomponents[\"VEVENT\"]\n end"
] | [
"0.72515243",
"0.7226806",
"0.71277463",
"0.7121339",
"0.7039129",
"0.7026969",
"0.7017228",
"0.69764996",
"0.692445",
"0.68752366",
"0.68379885",
"0.67639434",
"0.67608327",
"0.67442226",
"0.66917515",
"0.66832185",
"0.66683763",
"0.66585296",
"0.66338986",
"0.65893066",
"0.6569153",
"0.65386736",
"0.65207",
"0.649343",
"0.648327",
"0.64676845",
"0.64464664",
"0.6443746",
"0.6441355",
"0.64403266",
"0.6416891",
"0.64001393",
"0.6399909",
"0.6395923",
"0.639496",
"0.6391366",
"0.63731223",
"0.6365149",
"0.6347912",
"0.63103765",
"0.6308207",
"0.630692",
"0.629638",
"0.62928694",
"0.62891597",
"0.62815154",
"0.6279772",
"0.62735564",
"0.62726814",
"0.62643665",
"0.62579674",
"0.6256007",
"0.62528306",
"0.6247127",
"0.6223028",
"0.62212926",
"0.62200725",
"0.6219113",
"0.6199147",
"0.6187423",
"0.6165666",
"0.6155746",
"0.61543",
"0.61517406",
"0.61274964",
"0.61274964",
"0.6107936",
"0.6083718",
"0.60811627",
"0.6070444",
"0.60570437",
"0.60512227",
"0.6045952",
"0.6045952",
"0.6039796",
"0.6039185",
"0.60389674",
"0.6029328",
"0.6026628",
"0.6026628",
"0.60222334",
"0.6016899",
"0.60136354",
"0.60025114",
"0.6000901",
"0.5985031",
"0.5984163",
"0.5980997",
"0.5978778",
"0.5976249",
"0.59758997",
"0.5967161",
"0.5962509",
"0.5945607",
"0.59402835",
"0.5939195",
"0.59377086",
"0.5932736",
"0.59269977",
"0.59141105",
"0.5904328"
] | 0.0 | -1 |
GET /franchises GET /franchises.json | def index
@franchises = Franchise.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show \n @franchise = @franchise_set.franchises.find(params[:id])\n\n respond_with(@franchise_set, @franchise)\n end",
"def show \n @franchise_set = FranchiseSet.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @franchise_set }\n end\n end",
"def consulta\n fiesta = Fiesta.all\n render json: fiesta\n end",
"def index\n @franchises = @location.franchises.order(:name)\n #@franchises = Franchise.all\n end",
"def new \n @franchise_set = @franchise_set.franchises.new\n @franchise = @franchise_set.franchises.build\n\n respond_with(@franchise_set, @franchise)\n end",
"def index\n @fretes = Frete.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @fretes }\n end\n end",
"def index\n @frais_hebergements = FraisHebergement.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @frais_hebergements }\n end\n end",
"def show\n @forest = Forest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @forest }\n end\n end",
"def index\n\t @fares = Fare.all\n\n\t respond_to do |format|\n\t\tformat.html # index.html.erb\n\t\tformat.json { render json: @fares }\n\t end\n\tend",
"def show\n @fundraiser = Fundraiser.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fundraiser }\n end\n end",
"def show\n @fundraiser = Fundraiser.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fundraiser }\n end\n end",
"def display_franchises(company)\n puts \"#{company.name} owns these franchises:\"\n company.franchises.select { |franchise| puts \"Franchise #{franchise.id} in #{franchise.location}\" }\n end",
"def show\n @franchise = Franchise.find(params[:id])\n end",
"def index\n render json: @fiestas\n end",
"def new \n @franchise_set = FranchiseSet.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @franchise_set }\n end\n end",
"def index\n @filials = Filial.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @filials }\n end\n end",
"def show\n @frete = Frete.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @frete }\n end\n end",
"def show\n @fortune = Fortune.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fortune }\n end\n end",
"def show\n @frais_hebergement = FraisHebergement.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @frais_hebergement }\n end\n end",
"def index\n @familia = Familium.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @familia }\n end\n end",
"def show\n @familium = Familium.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @familium }\n end\n end",
"def create \n @franchise = @franchise_set.franchises.new(params[:franchise])\n\n respond_to do |format|\n if @franchise.save \n format.html { redirect_to \"/franchise_sets/#{@franchise_set.id}/edit\", :franchise_set_id => franchise_set.id, notice: 'Franchise was successfully created.' }\n format.json { render json: @franchise}\n else\n format.html { render action: \"new\" }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @family_crest = FamilyCrest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family_crest }\n end\n end",
"def show\n @frais_annex = FraisAnnex.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @frais_annex }\n end\n end",
"def show\n\t @fare = Fare.find(params[:id])\n\n\t respond_to do |format|\n\t\tformat.html # show.html.erb\n\t\tformat.json { render json: @fare }\n\t end\n\tend",
"def show\n render json: Festival.build_for(params[:id]).to_json\n end",
"def show\n @fund = Fund.friendly.find(params[:id])\n\n render json: @fund\n end",
"def index\n @funds = Fund.all\n\n render json: @funds\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @franchise }\n end\n end",
"def show\n @filial = Filial.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @filial }\n end\n end",
"def index\n @search = Franchise.search(params[:search])\n @franchises = @search.paginate(:page => params[:page])\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @franchises }\n end\n end",
"def show\n @fridge = Fridge.find(params[:id])\n @items = @fridge.fridge_items # for _item_list.html.erb\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fridge }\n end\n end",
"def get_favourite_restaurants\n @profile = Profile.find(params[:id])\n @restaurants = @profile.favourites\n\n render status: 200, json: @restaurants\n end",
"def index\n @frais_annexes = FraisAnnex.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @frais_annexes }\n end\n end",
"def show\n render json: @fund\n end",
"def create\n @franchise = @location.franchises.new(franchise_params)\n\n respond_to do |format|\n if @franchise.save\n format.html { redirect_to [@client, @location, @franchise], notice: 'Franchise was successfully created.' }\n format.json { render action: 'show', status: :created, location: @franchise }\n else\n format.html { render action: 'new' }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @family = get_family(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family }\n end\n end",
"def index\n @frais_repas = FraisRepa.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @frais_repas }\n end\n end",
"def show\n @frais_repa = FraisRepa.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @frais_repa }\n end\n end",
"def index\n @frances = France.all\n end",
"def show\n require 'net/http'\n require 'json'\n\n response = Net::HTTP.get_response( URI.parse( \"http://freeshit.firebaseio.com/items/%s.json\" % [ params[:id] ] ) );\n\n begin\n @fb_item = JSON.parse(response.body)\n rescue\n render :status => 404, :text => 'Item not found.'\n return\n end\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fb_item }\n end\n end",
"def show\n @foil = Foil.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @foil }\n end\n end",
"def show\n dinosaurs = Dinosaur.filter_by_species(params[:species])\n\n if dinosaurs\n render json: dinosaurs\n else\n render json: dinosaurs.errors, status: :unprocessable_entity\n end\n end",
"def show\n @farmer = Farmer.find(params[:id])\n\t\t@dispensals = @farmer.dispensals.all.paginate(:page => params[:page], :order => 'updated_at DESC', :per_page => 5)\n\t\t@donations = @farmer.donations.all.paginate(:page => params[:page], :order => 'updated_at DESC', :per_page => 5)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @farmer }\n end\n end",
"def index\n @frats = Frat.all\n end",
"def show\n family = current_family\n p current_family\n render json: family.as_json\n end",
"def show\n\n respond_to do |format|\n format.html { # show.html.erb\n @flyer_info = FlyerInfo.find(params[:id])\n }\n format.json {\n render json: getflyer(params[:id])\n }\n end\n\n end",
"def flights_fields\n render json: Search.getFlightsFieldInfo()\n end",
"def index\n @flyer_infos = FlyerInfo.all(:select => \"id, topimg, sideimg, capacity, speed, stormresist, multiplier, price, tier, load_time, disabled\")\n @language = ApplicationHelper.preferred_language(request.headers[\"Accept-Language\"])\n\n @complete_flyers = @flyer_infos.collect { |flyer_info|\n flyer_info.as_json.merge(FlyerInfosHelper.getflyerloc(flyer_info, @language).first.as_json)\n }\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @complete_flyers }\n end\n end",
"def show\n # render json: @birthday_party\n end",
"def index\n @flights = Flight.all\n render json: @flights\n end",
"def show\n @golfer = Golfer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @golfer }\n end\n end",
"def index\n @devises = Devise.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @devises }\n end\n end",
"def show\n @title_view = 'Culturas'\n @cultural_heritage_culture = CulturalHeritage::Culture.find(params[:id])\n\n points = @cultural_heritage_culture.list_point\n\n if points.length > 0\n\n count = 0\n @json = \"[[\"\n points.each do |point|\n if (count == 0)\n @json = @json << '{\"lng\": ' << point.longitude.to_s << ', \"lat\": ' << point.latitude.to_s <<\n ', \"strokeColor\": \"#FF0000\", \"strokeOpacity\": 0.3, \"strokeWeight\": 1, \"fillColor\": \"#FF0000\", \"fillOpacity\": 0.7}'\n else\n @json = @json << ', {\"lng\": ' << point.longitude.to_s << ', \"lat\": ' << point.latitude.to_s << '}'\n end\n count = count + 1\n end\n @json = @json << \"]]\"\n\n end\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @cultural_heritage_culture }\n format.json { render :json => @cultural_heritage_culture }\n end\n end",
"def show\n @foiltype = Foiltype.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @foiltype }\n end\n end",
"def show\n @fulcliente = Fulcliente.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fulcliente }\n end\n end",
"def fabrics\n # API GET for fabrics\n result = @fabrics.fabrics\n result[1]\n end",
"def show\n @favourite_food = FavouriteFood.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @favourite_food }\n end\n end",
"def index\n @farms = current_user.farms\n set_page_title\n\n @barn = []\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @farms }\n end\n end",
"def show\n @finance = Finance.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @finance }\n end\n end",
"def new\n @fortune = Fortune.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fortune }\n end\n end",
"def flights\n trip = Trip.where('id = ?', params[:id]).take\n if !trip.nil\n respond_with( trip.flights )\n else\n render :json => { error: 404 }, :status => 404\n end\n end",
"def show\n @family = Family.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family }\n end\n end",
"def show\n @passivo_circulante_financeiro = PassivoCirculanteFinanceiro.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @passivo_circulante_financeiro }\n end\n end",
"def index\n @dev_folios = DevFolio.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @dev_folios }\n end\n end",
"def show\n @favorite_flyer = FavoriteFlyer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @favorite_flyer }\n end\n end",
"def show\n @surgery = Surgery.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @surgery }\n end\n end",
"def show\n @dependencia = Dependencia.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dependencia }\n end\n end",
"def show\n @nfer = Nfer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @nfer }\n end\n end",
"def new\n @fundraiser = Fundraiser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fundraiser }\n end\n end",
"def new\n @farmer = Farmer.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @farmer }\n end\n end",
"def show\n @fred = Fred.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fred }\n end\n end",
"def index \n fans = Fan.all \n render json: fans \n end",
"def flights\n result = Search.search_flights(params)\n if result\n # The search gave us something so we can return it\n render json: result\n else\n # Something went wrong, return a 500 error with no body\n render status: 500, json: nil\n end\n end",
"def show\n @flight = Flight.find(params[:id])\n render json: @flight\n end",
"def get_folio_data\n netid = params['netid']\n url = ENV['OKAPI_URL']\n tenant = ENV['OKAPI_TENANT']\n account = CUL::FOLIO::Edge.patron_account(url, tenant, folio_token, { username: netid })\n # Rails.logger.debug(\"mjc12test: Got FOLIO account #{account.inspect}\")\n render json: account\n end",
"def show\n begin\n @fucker = Fucker.find(params[:id])\n respond_to do |format|\n format.json { render json: @fucker }\n end\n rescue => err\n $log.warn(err)\n respond_to do |format|\n format.json { render json: err, status: :internal_server_error }\n end\n end\n end",
"def show\n render json: @diet, status: 200, root: true\n end",
"def index\n @franchisee_royalties = FranchiseeRoyalty.all\n end",
"def show\n @fec_filing = FecFiling.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @fec_filing }\n end\n end",
"def show\n @fec_filing = FecFiling.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @fec_filing }\n end\n end",
"def index\n @spoofers = Spoofer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @spoofers }\n end\n end",
"def index\n @factura = Factura.find(params[:factura_id])\n @renglon_facturas = @factura.renglon_facturas\n\n respond_to do |format|\n format.html # index.html.erb\n #format.json { render json: @renglon_facturas }\n end\n end",
"def show\n @fase = Fase.find(params[:id])\n\n respond_to do |format|\n format.json { render json: @fase }\n format.js\n end\n end",
"def familiarity artist\n url = \"http://developer.echonest.com/api/v4/artist/familiarity?api_key=#{ECHONEST_API_KEY}&name=#{artist}&format=json\"\n result = parseURL url\n result[\"response\"][\"artist\"][\"familiarity\"]\nend",
"def show\n @visitation = Visitation.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @visitation }\n end\n end",
"def index\n @frinds = Frind.all\n end",
"def index\n @cofis = Cofi.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @cofis }\n end\n end",
"def get_featured\n render json: Event.where(is_featured: true), status: :ok\n end",
"def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @life_insurance }\n end\n end",
"def new\n @frais_hebergement = FraisHebergement.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @frais_hebergement }\n end\n end",
"def index\n @departamentos = Departamento.all\n\n render json: @departamentos\n end",
"def show\n render json: @departamento\n end",
"def index\n @forests = Forest.all\n end",
"def new\n @frete = Frete.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @frete }\n end\n end",
"def show\n @fabric = Fabric.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fabric }\n end\n end",
"def show\n @flight = Flight.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @flight }\n end\n end",
"def show\n @flight = Flight.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @flight }\n end\n end",
"def index\n @farms = Farm.all\n\t\trespond_with(@users,@farms)\n end",
"def show\n @departure = Departure.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @departure }\n end\n end"
] | [
"0.68629324",
"0.66568244",
"0.65204364",
"0.6466579",
"0.6461302",
"0.6414443",
"0.6412911",
"0.63708305",
"0.63563764",
"0.6344071",
"0.6344071",
"0.63136554",
"0.6311347",
"0.6288984",
"0.626852",
"0.624103",
"0.6216137",
"0.6170594",
"0.61650807",
"0.6142835",
"0.6113395",
"0.6083594",
"0.6081221",
"0.604328",
"0.60310304",
"0.6029758",
"0.60214025",
"0.60209787",
"0.6018042",
"0.60129756",
"0.6011935",
"0.5980373",
"0.59716684",
"0.5963693",
"0.5961936",
"0.5958985",
"0.59523886",
"0.5940975",
"0.5934158",
"0.59289825",
"0.59284574",
"0.5922714",
"0.58787817",
"0.58782786",
"0.5871184",
"0.5868981",
"0.58652943",
"0.5860182",
"0.58473736",
"0.5839764",
"0.583748",
"0.583074",
"0.5830189",
"0.581547",
"0.5811845",
"0.58087355",
"0.5804655",
"0.5801873",
"0.5801281",
"0.5800266",
"0.57944643",
"0.5790907",
"0.57833934",
"0.5782493",
"0.57812554",
"0.5780651",
"0.5777787",
"0.57775754",
"0.577266",
"0.5757382",
"0.5757367",
"0.57562",
"0.5752546",
"0.574489",
"0.57402474",
"0.57375634",
"0.5736445",
"0.573462",
"0.57303095",
"0.5729944",
"0.5729944",
"0.57289386",
"0.57248676",
"0.5711984",
"0.57079935",
"0.57047004",
"0.5701378",
"0.5695403",
"0.56952393",
"0.56952304",
"0.56915176",
"0.5687325",
"0.56847274",
"0.56791353",
"0.5678273",
"0.56780845",
"0.567406",
"0.56740093",
"0.5672569",
"0.56713176"
] | 0.69259226 | 0 |
GET /franchises/1 GET /franchises/1.json | def show
@address = Address.find_by(id: @franchise.address_id)
@loss_types = LossType.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show \n @franchise = @franchise_set.franchises.find(params[:id])\n\n respond_with(@franchise_set, @franchise)\n end",
"def show \n @franchise_set = FranchiseSet.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @franchise_set }\n end\n end",
"def index\n @franchises = Franchise.all\n end",
"def show\n @forest = Forest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @forest }\n end\n end",
"def new \n @franchise_set = @franchise_set.franchises.new\n @franchise = @franchise_set.franchises.build\n\n respond_with(@franchise_set, @franchise)\n end",
"def show\n @franchise = Franchise.find(params[:id])\n end",
"def consulta\n fiesta = Fiesta.all\n render json: fiesta\n end",
"def show\n render json: Festival.build_for(params[:id]).to_json\n end",
"def show\n @fundraiser = Fundraiser.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fundraiser }\n end\n end",
"def show\n @fundraiser = Fundraiser.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fundraiser }\n end\n end",
"def new \n @franchise_set = FranchiseSet.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @franchise_set }\n end\n end",
"def index\n @franchises = @location.franchises.order(:name)\n #@franchises = Franchise.all\n end",
"def show\n @frete = Frete.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @frete }\n end\n end",
"def show\n @familium = Familium.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @familium }\n end\n end",
"def show\n @fortune = Fortune.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fortune }\n end\n end",
"def index\n @fretes = Frete.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @fretes }\n end\n end",
"def index\n\t @fares = Fare.all\n\n\t respond_to do |format|\n\t\tformat.html # index.html.erb\n\t\tformat.json { render json: @fares }\n\t end\n\tend",
"def index\n render json: @fiestas\n end",
"def show\n @frais_hebergement = FraisHebergement.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @frais_hebergement }\n end\n end",
"def show\n @filial = Filial.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @filial }\n end\n end",
"def show\n @fund = Fund.friendly.find(params[:id])\n\n render json: @fund\n end",
"def index\n @frais_hebergements = FraisHebergement.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @frais_hebergements }\n end\n end",
"def index\n @filials = Filial.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @filials }\n end\n end",
"def index\n @familia = Familium.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @familia }\n end\n end",
"def show\n @family = get_family(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family }\n end\n end",
"def show\n\t @fare = Fare.find(params[:id])\n\n\t respond_to do |format|\n\t\tformat.html # show.html.erb\n\t\tformat.json { render json: @fare }\n\t end\n\tend",
"def show\n @frais_repa = FraisRepa.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @frais_repa }\n end\n end",
"def show\n @family_crest = FamilyCrest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family_crest }\n end\n end",
"def show\n @flight = Flight.find(params[:id])\n render json: @flight\n end",
"def show\n @frais_annex = FraisAnnex.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @frais_annex }\n end\n end",
"def show\n @foil = Foil.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @foil }\n end\n end",
"def show\n @foiltype = Foiltype.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @foiltype }\n end\n end",
"def show\n @golfer = Golfer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @golfer }\n end\n end",
"def create \n @franchise = @franchise_set.franchises.new(params[:franchise])\n\n respond_to do |format|\n if @franchise.save \n format.html { redirect_to \"/franchise_sets/#{@franchise_set.id}/edit\", :franchise_set_id => franchise_set.id, notice: 'Franchise was successfully created.' }\n format.json { render json: @franchise}\n else\n format.html { render action: \"new\" }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n\n respond_to do |format|\n format.html { # show.html.erb\n @flyer_info = FlyerInfo.find(params[:id])\n }\n format.json {\n render json: getflyer(params[:id])\n }\n end\n\n end",
"def show\n @fabric = Fabric.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fabric }\n end\n end",
"def show\n family = current_family\n p current_family\n render json: family.as_json\n end",
"def show\n begin\n @fucker = Fucker.find(params[:id])\n respond_to do |format|\n format.json { render json: @fucker }\n end\n rescue => err\n $log.warn(err)\n respond_to do |format|\n format.json { render json: err, status: :internal_server_error }\n end\n end\n end",
"def show\n @flight = Flight.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @flight }\n end\n end",
"def show\n @flight = Flight.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @flight }\n end\n end",
"def show\n @family = Family.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family }\n end\n end",
"def show\n @dependencia = Dependencia.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dependencia }\n end\n end",
"def show\n begin\n @dev_folio = DevFolio.find(params[:id])\n rescue\n @dev_folio = DevFolio.where(label: params[:id]).first()\n end\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dev_folio }\n end\n end",
"def show\n @fulcliente = Fulcliente.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fulcliente }\n end\n end",
"def flights\n trip = Trip.where('id = ?', params[:id]).take\n if !trip.nil\n respond_with( trip.flights )\n else\n render :json => { error: 404 }, :status => 404\n end\n end",
"def show\n @nfer = Nfer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @nfer }\n end\n end",
"def show\n @fred = Fred.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fred }\n end\n end",
"def display_franchises(company)\n puts \"#{company.name} owns these franchises:\"\n company.franchises.select { |franchise| puts \"Franchise #{franchise.id} in #{franchise.location}\" }\n end",
"def show\n require 'net/http'\n require 'json'\n\n response = Net::HTTP.get_response( URI.parse( \"http://freeshit.firebaseio.com/items/%s.json\" % [ params[:id] ] ) );\n\n begin\n @fb_item = JSON.parse(response.body)\n rescue\n render :status => 404, :text => 'Item not found.'\n return\n end\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fb_item }\n end\n end",
"def show\n @father = Father.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @father }\n end\n end",
"def show\n @fridge = Fridge.find(params[:id])\n @items = @fridge.fridge_items # for _item_list.html.erb\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fridge }\n end\n end",
"def new\n @fortune = Fortune.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fortune }\n end\n end",
"def show\n @favorite_flyer = FavoriteFlyer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @favorite_flyer }\n end\n end",
"def index\n @flights = Flight.all\n render json: @flights\n end",
"def index\n @frais_repas = FraisRepa.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @frais_repas }\n end\n end",
"def show\n fatura = Fatura.find(params[:id])\n render json: {status: 'SUCCESS', message:'Fatura loaded', data:fatura}, status: :ok\n end",
"def show\n render json: @fund\n end",
"def show\n @basis = Base.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @basis }\n end\n end",
"def index\n @frats = Frat.all\n end",
"def show\n @factura = Factura.find(params[:factura_id])\n @renglon_factura = @factura.renglon_facturas.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n #format.json { render json: @renglon_factura }\n end\n end",
"def get_favourite_restaurants\n @profile = Profile.find(params[:id])\n @restaurants = @profile.favourites\n\n render status: 200, json: @restaurants\n end",
"def show\n @fec_filing = FecFiling.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @fec_filing }\n end\n end",
"def show\n @fec_filing = FecFiling.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @fec_filing }\n end\n end",
"def show\n @farmer = Farmer.find(params[:id])\n\t\t@dispensals = @farmer.dispensals.all.paginate(:page => params[:page], :order => 'updated_at DESC', :per_page => 5)\n\t\t@donations = @farmer.donations.all.paginate(:page => params[:page], :order => 'updated_at DESC', :per_page => 5)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @farmer }\n end\n end",
"def index\n @funds = Fund.all\n\n render json: @funds\n end",
"def show\n @family = Family.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @family }\n end\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @franchise }\n end\n end",
"def show\n @fueltype = Fueltype.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fueltype }\n end\n end",
"def create\n @franchise = @location.franchises.new(franchise_params)\n\n respond_to do |format|\n if @franchise.save\n format.html { redirect_to [@client, @location, @franchise], notice: 'Franchise was successfully created.' }\n format.json { render action: 'show', status: :created, location: @franchise }\n else\n format.html { render action: 'new' }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @passivo_circulante_financeiro = PassivoCirculanteFinanceiro.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @passivo_circulante_financeiro }\n end\n end",
"def show\n @favourite_food = FavouriteFood.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @favourite_food }\n end\n end",
"def new\n @farmer = Farmer.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @farmer }\n end\n end",
"def show\n @fishing_method = FishingMethod.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fishing_method }\n end\n end",
"def show\n @finance = Finance.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @finance }\n end\n end",
"def new\n @flower = Flower.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @flower }\n end\n end",
"def fabrics\n # API GET for fabrics\n result = @fabrics.fabrics\n result[1]\n end",
"def show\n @colegiatura = Colegiatura.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @colegiatura }\n end\n end",
"def show\n @faction = Faction.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @faction }\n end\n end",
"def show\n @fhir_base_url = FhirBaseUrl.find(params[:id])\n\n respond_to do |format|\n format.html { redirect_to :action => :edit }\n format.json { render json: @fhir_base_url }\n end\n end",
"def show\n @rfq = Rfq.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @rfq }\n end\n end",
"def show\n @visitation = Visitation.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @visitation }\n end\n end",
"def show\n @departure = Departure.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @departure }\n end\n end",
"def index\n @frances = France.all\n end",
"def InfoFarmacia\n \tid = params[:id]\n @farmacias = Farmacium.find_by_sql(\"SELECT nombre, correo, direccion, latitud, longitud, telefono1, telefono2 FROM farmacia where id = #{id}\")\n render json: @farmacias\n end",
"def show\n @cofi = Cofi.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cofi }\n end\n end",
"def index\n @factura = Factura.find(params[:factura_id])\n @renglon_facturas = @factura.renglon_facturas\n\n respond_to do |format|\n format.html # index.html.erb\n #format.json { render json: @renglon_facturas }\n end\n end",
"def show\n @referee = Referee.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @referee }\n end\n end",
"def flights_fields\n render json: Search.getFlightsFieldInfo()\n end",
"def show\n @food = Food.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @food }\n end\n end",
"def show\n @fase = Fase.find(params[:id])\n\n respond_to do |format|\n format.json { render json: @fase }\n format.js\n end\n end",
"def show\n @fish_type = FishType.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fish_type }\n end\n end",
"def show\n\n @food = Food.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @food }\n end\n end",
"def show\n @spoofer = Spoofer.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @spoofer }\n end\n end",
"def index\n @frais_annexes = FraisAnnex.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @frais_annexes }\n end\n end",
"def show\n @fire = Fire.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fire }\n end\n end",
"def show\n @central_correio = CentralCorreio.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @central_correio }\n end\n end",
"def show\n @futbolada = Futbolada.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @futbolada }\n end\n end",
"def new\n @fundraiser = Fundraiser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fundraiser }\n end\n end",
"def new\n @frete = Frete.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @frete }\n end\n end",
"def show\n @ftype = Ftype.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ftype }\n end\n end",
"def show\n @title_view = 'Culturas'\n @cultural_heritage_culture = CulturalHeritage::Culture.find(params[:id])\n\n points = @cultural_heritage_culture.list_point\n\n if points.length > 0\n\n count = 0\n @json = \"[[\"\n points.each do |point|\n if (count == 0)\n @json = @json << '{\"lng\": ' << point.longitude.to_s << ', \"lat\": ' << point.latitude.to_s <<\n ', \"strokeColor\": \"#FF0000\", \"strokeOpacity\": 0.3, \"strokeWeight\": 1, \"fillColor\": \"#FF0000\", \"fillOpacity\": 0.7}'\n else\n @json = @json << ', {\"lng\": ' << point.longitude.to_s << ', \"lat\": ' << point.latitude.to_s << '}'\n end\n count = count + 1\n end\n @json = @json << \"]]\"\n\n end\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @cultural_heritage_culture }\n format.json { render :json => @cultural_heritage_culture }\n end\n end"
] | [
"0.69264513",
"0.6824369",
"0.6803302",
"0.6521705",
"0.6509705",
"0.6501075",
"0.64499724",
"0.64327174",
"0.6416755",
"0.6416755",
"0.64097106",
"0.64078087",
"0.63947767",
"0.6377932",
"0.63657635",
"0.6353808",
"0.6286338",
"0.62391776",
"0.6233477",
"0.6228835",
"0.62232786",
"0.62220794",
"0.6217948",
"0.6208894",
"0.61936015",
"0.61773217",
"0.6172774",
"0.61626935",
"0.61471957",
"0.6142208",
"0.61355835",
"0.6120393",
"0.60780114",
"0.60766745",
"0.60622215",
"0.6053709",
"0.604804",
"0.6044219",
"0.60379165",
"0.6037658",
"0.60365695",
"0.60303414",
"0.60249615",
"0.6010608",
"0.60080445",
"0.59914833",
"0.59782517",
"0.597773",
"0.5975257",
"0.5974105",
"0.59705913",
"0.5960676",
"0.5960213",
"0.59549576",
"0.5941746",
"0.5930354",
"0.5930094",
"0.59286076",
"0.5927613",
"0.5926795",
"0.5924399",
"0.5923953",
"0.5923953",
"0.59213567",
"0.5916685",
"0.59039134",
"0.5901105",
"0.58918625",
"0.5887099",
"0.5886053",
"0.5879453",
"0.5879167",
"0.5878878",
"0.58761835",
"0.58735085",
"0.58608466",
"0.58594185",
"0.5859128",
"0.5858425",
"0.58576494",
"0.5855888",
"0.5842426",
"0.58395725",
"0.5837062",
"0.58369344",
"0.5836341",
"0.58354324",
"0.5823937",
"0.5822758",
"0.5821407",
"0.58155763",
"0.58155197",
"0.5814911",
"0.5814252",
"0.58050907",
"0.5803346",
"0.58006316",
"0.57965857",
"0.579653",
"0.57918024",
"0.5790895"
] | 0.0 | -1 |
POST /franchises POST /franchises.json | def create
@franchise = Franchise.new(franchise_params)
@address = Address.new(address_params)
    # Save the address first so its database id exists before it is linked to the franchise
    @franchise.address_id = @address.id if @address.save
respond_to do |format|
if @franchise.save
format.html { redirect_to @franchise, notice: 'Franchise was successfully created.' }
format.json { render :show, status: :created, location: @franchise }
else
format.html { render :new }
format.json { render json: @franchise.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create \n @franchise = @franchise_set.franchises.new(params[:franchise])\n\n respond_to do |format|\n if @franchise.save \n format.html { redirect_to \"/franchise_sets/#{@franchise_set.id}/edit\", :franchise_set_id => franchise_set.id, notice: 'Franchise was successfully created.' }\n format.json { render json: @franchise}\n else\n format.html { render action: \"new\" }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @franchise = @location.franchises.new(franchise_params)\n\n respond_to do |format|\n if @franchise.save\n format.html { redirect_to [@client, @location, @franchise], notice: 'Franchise was successfully created.' }\n format.json { render action: 'show', status: :created, location: @franchise }\n else\n format.html { render action: 'new' }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new \n @franchise_set = @franchise_set.franchises.new\n @franchise = @franchise_set.franchises.build\n\n respond_with(@franchise_set, @franchise)\n end",
"def create \n @franchise_set = FranchiseSet.new(params[:franchise_set])\n\n respond_to do |format|\n if @franchise_set.save\n format.html { redirect_to \"/franchise_sets/\", notice: 'Franchise set was successfully created.' }\n format.json { render json: @franchise_set, status: :created, location: @franchise_set }\n else\n format.html { render action: \"new\" }\n format.json { render json: @franchise_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n respond_to do |format|\n if @franchise.save\n flash[:notice] = 'Franchise was successfully created.'\n format.html { redirect_to(@franchise) }\n format.xml { render :xml => @franchise, :status => :created, :location => @franchise }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @franchise.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @frat = Frat.new(frat_params)\n\n respond_to do |format|\n if @frat.save\n format.html { redirect_to @frat, notice: 'Frat was successfully created.' }\n format.json { render action: 'show', status: :created, location: @frat }\n else\n format.html { render action: 'new' }\n format.json { render json: @frat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fundraiser = Fundraiser.new(params[:fundraiser])\n\n respond_to do |format|\n if @fundraiser.save\n format.html { redirect_to @fundraiser, notice: 'Fundraiser was successfully created.' }\n format.json { render json: @fundraiser, status: :created, location: @fundraiser }\n else\n format.html { render action: \"new\" }\n format.json { render json: @fundraiser.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fund = Fund.new(fund_params)\n\n if @fund.save\n render json: @fund, status: :created, location: @fund\n else\n render json: @fund.errors, status: :unprocessable_entity\n end\n end",
"def create\n @forest = Forest.new(params[:forest])\n\n respond_to do |format|\n if @forest.save\n format.html { redirect_to @forest, notice: 'Forest was successfully created.' }\n format.json { render json: @forest, status: :created, location: @forest }\n else\n format.html { render action: \"new\" }\n format.json { render json: @forest.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @france = France.new(france_params)\n\n respond_to do |format|\n if @france.save\n format.html { redirect_to @france, notice: 'France was successfully created.' }\n format.json { render :show, status: :created, location: @france }\n else\n format.html { render :new }\n format.json { render json: @france.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n @frasco = Frasco.new(frasco_params)\n\n respond_to do |format|\n if @frasco.save\n format.html { redirect_to @frasco, notice: 'Frasco was successfully created.' }\n format.json { render :show, status: :created, location: @frasco }\n else\n format.html { render :new }\n format.json { render json: @frasco.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @frais_hebergement = FraisHebergement.new(params[:frais_hebergement])\n\n respond_to do |format|\n if @frais_hebergement.save\n format.html { redirect_to @frais_hebergement, :notice => 'Le frais d\\'hébergement a bien été créé' }\n format.json { render :json => @frais_hebergement, :status => :created, :location => @frais_hebergement }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @frais_hebergement.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @fridge = Fridge.new(params[:fridge])\n\n respond_to do |format|\n if @fridge.save\n format.html { redirect_to @fridge, notice: 'Fridge was successfully created.' }\n format.json { render json: @fridge, status: :created, location: @fridge }\n else\n format.html { render action: \"new\" }\n format.json { render json: @fridge.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @forest = Forest.new(forest_params)\n\n respond_to do |format|\n if @forest.save\n format.html { redirect_to @forest, notice: 'Forest was successfully created.' }\n format.json { render action: 'show', status: :created, location: @forest }\n else\n format.html { render action: 'new' }\n format.json { render json: @forest.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @frete = Frete.new(params[:frete])\n\n respond_to do |format|\n if @frete.save\n format.html { redirect_to @frete, notice: 'Frete was successfully created.' }\n format.json { render json: @frete, status: :created, location: @frete }\n else\n format.html { render action: \"new\" }\n format.json { render json: @frete.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fiestum = Fiestum.new(fiestum_params)\n\n respond_to do |format|\n if @fiestum.save\n format.html { redirect_to @fiestum, notice: 'Fiestum was successfully created.' }\n format.json { render :show, status: :created, location: @fiestum }\n else\n format.html { render :new }\n format.json { render json: @fiestum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # @frind = Frind.new(frind_params)\n @frind = current_user.frinds.build(frind_params)\n respond_to do |format|\n if @frind.save\n format.html { redirect_to @frind, notice: 'Frind was successfully created.' }\n format.json { render :show, status: :created, location: @frind }\n else\n format.html { render :new }\n format.json { render json: @frind.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @frais_annex = FraisAnnex.new(params[:frais_annex])\n\n respond_to do |format|\n if @frais_annex.save\n format.html { redirect_to @frais_annex, :notice => 'Le frais annexe a bien été créé' }\n format.json { render :json => @frais_annex, :status => :created, :location => @frais_annex }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @frais_annex.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @freind = Freind.new(freind_params)\n\n respond_to do |format|\n if @freind.save\n format.html { redirect_to @freind, notice: \"Freind was successfully created.\" }\n format.json { render :show, status: :created, location: @freind }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @freind.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fact = @cat.facts.create!({ fact: api_request })\n if @fact.save\n render json: @fact\n else\n render error: { message: \"Não foi possível criar esse fato para este gatinho! :(\" }, status: 400\n end\n end",
"def create\n @surgery = Surgery.new(params[:surgery])\n\n respond_to do |format|\n if @surgery.save\n format.html { redirect_to @surgery, notice: 'Surgery was successfully created.' }\n format.json { render json: @surgery, status: :created, location: @surgery }\n else\n format.html { render action: \"new\" }\n format.json { render json: @surgery.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @surfari = Surfari.new(surfari_params)\n\n respond_to do |format|\n if @surfari.save\n format.html { redirect_to surfaris_path, notice: 'Surfari was successfully created.' }\n format.json { render :show, status: :created, location: @surfari }\n else\n format.html { render :new }\n format.json { render json: @surfari.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @furdo = Furdo.new(furdo_params)\n\n respond_to do |format|\n if @furdo.save\n format.html { redirect_to @furdo, notice: 'Furdo was successfully created.' }\n format.json { render :show, status: :created, location: @furdo }\n else\n format.html { render :new }\n format.json { render json: @furdo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @franchisee_royalty = FranchiseeRoyalty.new(franchisee_royalty_params)\n\n respond_to do |format|\n if @franchisee_royalty.save\n format.html { redirect_to @franchisee_royalty, notice: 'Franchisee royalty was successfully created.' }\n format.json { render :show, status: :created, location: @franchisee_royalty }\n else\n format.html { render :new }\n format.json { render json: @franchisee_royalty.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @frais_repa = FraisRepa.new(params[:frais_repa])\n\n respond_to do |format|\n if @frais_repa.save\n format.html { redirect_to @frais_repa, :notice => 'Le frais de repas a bien été créé' }\n format.json { render :json => @frais_repa, :status => :created, :location => @frais_repa }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @frais_repa.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @featuring = Featuring.new(featuring_params)\n\n respond_to do |format|\n if @featuring.save\n format.html { redirect_to @featuring, notice: 'Featuring was successfully created.' }\n format.json { render :show, status: :created, location: @featuring }\n else\n format.html { render :new }\n format.json { render json: @featuring.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new \n @franchise_set = FranchiseSet.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @franchise_set }\n end\n end",
"def create\n origin = params[:fare][:origin]\n destination = params[:fare][:destination]\n\n \tuber = calculate_uber(origin, destination)\n lyft = calculate_lyft(origin, destination)\n\n if uber and lyft\n render json: [\n {\n company: 'uber', \n id: 1, \n price: uber, \n image: '/assets/uber.png'\n },\n {\n company: 'lyft',\n id: 2,\n price: lyft,\n image: '/assets/lyft.png'\n }], status: 200\n else\n render status: 500\n end\n\n end",
"def create\n @fishery = @country.fisheries.build(fishery_params)\n\n respond_to do |format|\n if @fishery.save\n format.html { redirect_to country_fisheries_path(@country), notice: \"Fishery was successfully created.\" }\n format.json { render :show, status: :created, location: @fishery }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @fishery.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fraternity = Fraternity.new(fraternity_params)\n\n respond_to do |format|\n if @fraternity.save\n format.html { redirect_to @fraternity, notice: 'Fraternity was successfully created.' }\n format.json { render :show, status: :created, location: @fraternity }\n else\n format.html { render :new }\n format.json { render json: @fraternity.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @disfraz = Disfraz.new(disfraz_params)\n\n respond_to do |format|\n if @disfraz.save\n format.html { redirect_to @disfraz, notice: 'Disfraz was successfully created.' }\n format.json { render :show, status: :created, location: @disfraz }\n else\n format.html { render :new }\n format.json { render json: @disfraz.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @familium = Familium.new(params[:familium])\n\n respond_to do |format|\n if @familium.save\n format.html { redirect_to familia_path, notice: 'Familia fue creada exitosamente.' }\n format.json { render json: @familium, status: :created, location: @familium }\n else\n format.html { render action: \"new\" }\n format.json { render json: @familium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @facture = Facture.new(facture_params)\n\n respond_to do |format|\n if @facture.save\n format.html { redirect_to add_ref_path(@facture), notice: 'La facture a bien été créée.'} \n format.json { render :add_ref, status: :created, location: @facture }\n else\n format.html { render :new }\n format.json { render json: @facture.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @familium = Familium.new(familium_params)\n\n respond_to do |format|\n if @familium.save\n format.html { redirect_to @familium, notice: 'Familium was successfully created.' }\n format.json { render :show, status: :created, location: @familium }\n else\n format.html { render :new }\n format.json { render json: @familium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fpsignal = Fpsignal.new(fpsignal_params)\n\n respond_to do |format|\n if @fpsignal.save\n format.html { redirect_to fpsignals_path, notice: 'Запись успешно добавлена.' }\n format.json { render :show, status: :created, location: fpsignals_path }\n else\n format.html { render :new }\n format.json { render json: @fpsignal.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fund = Fund.new(fund_params)\n\n if @fund.save\n render json: @fund, status: :created,\n location: api_v1_fund_path(@fund)\n else\n render_validation_errors @fund.errors\n end\n end",
"def create\n @facture = Facture.new(facture_params)\n\n respond_to do |format|\n if @facture.save\n format.html { redirect_to @facture, notice: 'Facture was successfully created.' }\n format.json { render :show, status: :created, location: @facture }\n else\n format.html { render :new }\n format.json { render json: @facture.errors, status: :unprocessable_entity }\n end\n end\n end",
"def creacion\n fiesta = Fiesta.new (params[:id])\n if Fiesta.save\n puts \"su fiesta a sido registrada\"\n else \n puts \"su fiesta no a sido registrada\"\n end\n render = json: fiesta \n end",
"def create\n @firmagideri = Firmagideri.new(firmagideri_params)\n\n respond_to do |format|\n if @firmagideri.save\n format.html { redirect_to firmagideris_path, notice: 'Kayıt başarıyla oluşturuldu.' }\n format.json { render :show, status: :created, location: @firmagideri }\n else\n format.html { render :new }\n format.json { render json: @firmagideri.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fastener = Fastener.new(fastener_params)\n\n respond_to do |format|\n if @fastener.save\n format.html { redirect_to @fastener, notice: 'Fastener was successfully created.' }\n format.json { render :show, status: :created, location: @fastener }\n else\n format.html { render :new }\n format.json { render json: @fastener.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fournisseur = Fournisseur.new(fournisseur_params)\n\n respond_to do |format|\n if @fournisseur.save\n format.html { redirect_to @fournisseur, notice: 'Fournisseur was successfully created.' }\n format.json { render :show, status: :created, location: @fournisseur }\n else\n format.html { render :new }\n format.json { render json: @fournisseur.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fatura = Fatura.new(fatura_params)\n\n respond_to do |format|\n if @fatura.save\n format.html { redirect_to @fatura, notice: 'Fatura was successfully created.' }\n format.json { render :show, status: :created, location: @fatura }\n else\n format.html { render :new }\n format.json { render json: @fatura.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fatura = Fatura.new(fatura_params)\n\n respond_to do |format|\n if @fatura.save\n format.html { redirect_to @fatura, notice: 'Fatura was successfully created.' }\n format.json { render :show, status: :created, location: @fatura }\n else\n format.html { render :new }\n format.json { render json: @fatura.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @surgery = Surgery.new(surgery_params)\n\n respond_to do |format|\n if @surgery.save\n format.html { redirect_to @surgery, notice: 'Surgery was successfully created.' }\n format.json { render :show, status: :created, location: @surgery }\n else\n format.html { render :new }\n format.json { render json: @surgery.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @flower = Flower.new(params[:flower])\n\n respond_to do |format|\n if @flower.save\n format.html { redirect_to @flower, notice: 'Flower was successfully created.' }\n format.json { render json: @flower, status: :created, location: @flower }\n else\n format.html { render action: \"new\" }\n format.json { render json: @flower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def franchisee_params\n params.require(:franchisee).permit!#(:franchisee_type_id, :location, :parent_id, :franchisee_personal=>[:id, :first_name, :middle_name, :last_name, :dob, :age, :occupation, :experience, :no_of_owners], :franchisee_contact=>[:id, :address, :city, :state, :country, :email_id, :contact_no, :land_line], :franchisee_agreement=>[:id, :agreement_date, :duration, :renewal_date, :location, :no_of_centers, :advance_amount_gst, :balance_amount_gst, :no_of_installment, :center_address, :city, :state, :pincode])\n end",
"def create\n @title = t('view.firms.new_title')\n @firm = Firm.new(params[:firm])\n\n respond_to do |format|\n if @firm.save\n format.html { redirect_to @firm, notice: t('view.firms.correctly_created') }\n format.json { render json: @firm, status: :created, location: @firm }\n else\n format.html { render action: 'new' }\n format.json { render json: @firm.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @factores_fluctuante = FactoresFluctuante.new(factores_fluctuante_params)\n\n respond_to do |format|\n if @factores_fluctuante.save\n format.html { redirect_to @factores_fluctuante, notice: 'Factores fluctuante was successfully created.' }\n format.json { render :show, status: :created, location: @factores_fluctuante }\n else\n format.html { render :new }\n format.json { render json: @factores_fluctuante.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @falta = Falta.new(falta_params)\n\n respond_to do |format|\n if @falta.save\n format.html { redirect_to @falta, notice: \"Falta was successfully created.\" }\n format.json { render :show, status: :created, location: @falta }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @falta.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @nfer = Nfer.new(params[:nfer])\n\n respond_to do |format|\n if @nfer.save\n format.html { redirect_to @nfer, notice: 'Nfer was successfully created.' }\n format.json { render json: @nfer, status: :created, location: @nfer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @nfer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @detalle_factura = DetalleFactura.new(detalle_factura_params)\n\n respond_to do |format|\n if @detalle_factura.save\n format.html { redirect_to @detalle_factura, notice: 'Detalle factura was successfully created.' }\n format.json { render :show, status: :created, location: @detalle_factura }\n else\n format.html { render :new }\n format.json { render json: @detalle_factura.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @furniture = Furniture.new(furniture_params)\n respond_to do |format|\n if @furniture.save\n format.html { redirect_to @furniture, notice: \"Furniture was successfully created.\" }\n format.json { render :show, status: :created, location: @furniture }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @furniture.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n # Rails.logger.warn \"====================\"\n # Rails.logger.warn foaf_params[:interests_attributes]\n # Rails.logger.warn \"====================\"\n\n \n @foaf = Foaf.new(name: foaf_params[:name], work: foaf_params[:work], \n slug: foaf_params[:slug], birthday: foaf_params[:birthday])\n\n if(foaf_params.has_key?(:interests_attributes))\n interest_ids = foaf_params[:interests_attributes].split(\",\").map { |s| s.to_i }\n interest_ids.each do |i|\n @foaf.interests << Interest.find(i)\n end\n end\n\n respond_to do |format|\n if @foaf.save \n format.html { redirect_to @foaf, notice: 'FOAF was successfully created.' }\n format.json { render action: 'show', status: :created, location: @foaf }\n else\n format.html { render action: 'new' }\n format.json { render json: @foaf.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @internship = Internship.new(params[:internship])\n @internship.description.strip!\n @internship.owner_hash = cookies[:hash] \n \n fields = params[:fields].split(\",\")\n fields.each do |desc|\n @internship.fields << Field.new(:description => desc.strip)\n end\n\n respond_to do |format|\n if @internship.save\n #format.html { redirect_to @internship }\n format.js\n #format.json { render :json => @internship, :status => :created, :location => @internship }\n else\n format.js {render :action => :create_error }\n #format.html { render :json => @internship.errors, :status => :unprocessable_entity }\n end\n end \n end",
"def create\n @famille = Famille.new(famille_params)\n\n respond_to do |format|\n if @famille.save\n format.html { redirect_to @famille, notice: 'Famille was successfully created.' }\n format.json { render :show, status: :created, location: @famille }\n\t else\n format.html { render :new }\n end\n end\n end",
"def create\n @fondo = Fondo.new(fondo_params)\n\n respond_to do |format|\n if @fondo.save\n format.html { redirect_to @fondo, notice: 'Fondo was successfully created.' }\n format.json { render :show, status: :created, location: @fondo }\n else\n format.html { render :new }\n format.json { render json: @fondo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fridge_food = FridgeFood.new(params[:fridge_food])\n\n respond_to do |format|\n if @fridge_food.save\n format.html { redirect_to(@fridge_food, :notice => 'Fridge food was successfully created.') }\n format.xml { render :xml => @fridge_food, :status => :created, :location => @fridge_food }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @fridge_food.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @filial = Filial.new(params[:filial])\n\n respond_to do |format|\n if @filial.save\n format.html { redirect_to filials_path, notice: 'Filial was successfully created.' }\n format.json { render json: @filial, status: :created, location: @filial }\n else\n format.html { render action: \"new\" }\n format.json { render json: @filial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n # Rails.logger.warn \"====================\"\n # Rails.logger.warn foaf_params[:interests_attributes]\n # Rails.logger.warn \"====================\"\n\n \n @foaf = Foaf.new(name: foaf_params[:name], work: foaf_params[:work], \n slug: foaf_params[:slug], birthday: foaf_params[:birthday])\n\n if(foaf_params.has_key?(:interests_attributes))\n interest_ids = foaf_params[:interests_attributes].split(\",\").map { |s| s.to_i }\n interest_ids.each do |i|\n @foaf.interests << Interest.find(i)\n end\n end\n\n respond_to do |format|\n if @foaf.save \n format.html { redirect_to @foaf, notice: 'Foaf was successfully created.' }\n format.json { render action: 'show', status: :created, location: @foaf }\n else\n format.html { render action: 'new' }\n format.json { render json: @foaf.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @dev_folio = DevFolio.new(params[:dev_folio])\n\n respond_to do |format|\n if @dev_folio.save\n format.html { redirect_to @dev_folio, notice: 'Dev folio was successfully created.' }\n format.json { render json: @dev_folio, status: :created, location: @dev_folio }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dev_folio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fornecedor = Fornecedor.new(fornecedor_params)\n\n respond_to do |format|\n if @fornecedor.save\n format.html { redirect_to @fornecedor, notice: 'Fornecedor was successfully created.' }\n format.json { render :show, status: :created, location: @fornecedor }\n else\n format.html { render :new }\n format.json { render json: @fornecedor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @dfactura = Dfactura.new(dfactura_params)\n\n respond_to do |format|\n if @dfactura.save\n format.html { redirect_to @dfactura, notice: 'Detalle de factura creado con éxito.' }\n format.json { render :show, status: :created, location: @dfactura }\n else\n format.html { render :new }\n format.json { render json: @dfactura.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @stage_fecundity = StageFecundity.new(params[:stage_fecundity])\n\n respond_to do |format|\n if @stage_fecundity.save\n format.html { redirect_to @stage_fecundity, notice: 'Stage fecundity was successfully created.' }\n format.json { render json: @stage_fecundity, status: :created, location: @stage_fecundity }\n else\n format.html { render action: \"new\" }\n format.json { render json: @stage_fecundity.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @filial = Filial.new(params[:filial])\n\n respond_to do |format|\n if @filial.save\n format.html { redirect_to @filial, notice: 'Filial was successfully created.' }\n format.json { render json: @filial, status: :created, location: @filial }\n else\n format.html { render action: \"new\" }\n format.json { render json: @filial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fish_poly = FishPoly.new(fish_poly_params)\n\n respond_to do |format|\n if @fish_poly.save\n format.html { redirect_to @fish_poly, notice: 'Fish poly was successfully created.' }\n format.json { render :show, status: :created, location: @fish_poly }\n else\n format.html { render :new }\n format.json { render json: @fish_poly.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n render :json => @fiestas.push(params[:fiesta])\n end",
"def create\n @flower = Flower.new(flower_params)\n\n respond_to do |format|\n if @flower.save\n format.html { redirect_to @flower, notice: 'Flower was successfully created.' }\n format.json { render :show, status: :created, location: @flower }\n else\n format.html { render :new }\n format.json { render json: @flower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fec_filing = FecFiling.new(params[:fec_filing])\n\n respond_to do |format|\n if @fec_filing.save\n format.html { redirect_to @fec_filing, :notice => 'Fec filing was successfully created.' }\n format.json { render :json => @fec_filing, :status => :created, :location => @fec_filing }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @fec_filing.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @fec_filing = FecFiling.new(params[:fec_filing])\n\n respond_to do |format|\n if @fec_filing.save\n format.html { redirect_to @fec_filing, :notice => 'Fec filing was successfully created.' }\n format.json { render :json => @fec_filing, :status => :created, :location => @fec_filing }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @fec_filing.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @feild = Feild.new(feild_params)\n\n respond_to do |format|\n if @feild.save\n format.html { redirect_to @feild, notice: 'Feild was successfully created.' }\n format.json { render :show, status: :created, location: @feild }\n else\n format.html { render :new }\n format.json { render json: @feild.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fertilizante = Fertilizante.new(fertilizante_params)\n\n respond_to do |format|\n if @fertilizante.save\n format.html { redirect_to @fertilizante, notice: 'Fertilizante was successfully created.' }\n format.json { render :show, status: :created, location: @fertilizante }\n else\n format.html { render :new }\n format.json { render json: @fertilizante.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @betraege = Betraege.new(params[:betraege])\n\n respond_to do |format|\n if @betraege.save\n format.html { redirect_to @betraege, :notice => 'Betraege was successfully created.' }\n format.json { render :json => @betraege, :status => :created, :location => @betraege }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @betraege.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @feira = Feira.new(feira_params)\n\n respond_to do |format|\n if @feira.save\n format.html { redirect_to @feira, notice: 'Feira was successfully created.' }\n format.json { render :show, status: :created, location: @feira }\n else\n format.html { render :new }\n format.json { render json: @feira.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fabrica = Fabrica.new(fabrica_params)\n\n respond_to do |format|\n if @fabrica.save\n format.html { redirect_to @fabrica, notice: 'Fabrica was successfully created.' }\n format.json { render action: 'show', status: :created, location: @fabrica }\n else\n format.html { render action: 'new' }\n format.json { render json: @fabrica.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fundraiser = Fundraiser.new(fundraiser_params)\n @fundraiser.owner_id = current_user.id\n @fundraiser.start_date = Date.current\n @fundraiser.raised = 0\n respond_to do |format|\n if @fundraiser.save\n format.html { redirect_to @fundraiser, notice: 'Fundraiser was successfully created.' }\n format.json { render :show, status: :created, location: @fundraiser }\n else\n format.html { render :new }\n format.json { render json: @fundraiser.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @flight = Flight.new(params[:flight])\n\n if @flight.save\n render json: @flight, status: :created, location: @flight\n else\n render json: @flight.errors, status: :unprocessable_entity\n end\n end",
"def create\n @investment_fund = InvestmentFund.new(investment_fund_params)\n\n respond_to do |format|\n if @investment_fund.save\n format.html { redirect_to @investment_fund, notice: 'Investment fund was successfully created.' }\n format.json { render :show, status: :created, location: @investment_fund }\n else\n format.html { render :new }\n format.json { render json: @investment_fund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @investment_fund = InvestmentFund.new(investment_fund_params)\n\n respond_to do |format|\n if @investment_fund.save\n format.html { redirect_to @investment_fund, notice: 'Investment fund was successfully created.' }\n format.json { render :show, status: :created, location: @investment_fund }\n else\n format.html { render :new }\n format.json { render json: @investment_fund.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @visitation = Visitation.new(visitation_params)\n\n respond_to do |format|\n if @visitation.save\n format.html { redirect_to @visitation, notice: 'Visitation was successfully created.' }\n format.json { render json: @visitation, status: :created, location: @visitation }\n else\n format.html { render action: \"new\" }\n format.json { render json: @visitation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fast_food = FastFood.new(fast_food_params)\n\n respond_to do |format|\n if @fast_food.save\n format.html { redirect_to @fast_food, notice: 'Fast food was successfully created.' }\n format.json { render :show, status: :created, location: @fast_food }\n else\n format.html { render :new }\n format.json { render json: @fast_food.errors, status: :unprocessable_entity }\n end\n end\n end",
"def franchise_params\n params.require(:franchise).permit(:name, :merchant_key, :location, :latitude, :longitude)\n end",
"def create\n saved = false\n begin\n ActiveRecord::Base.transaction do\n @financer = Financer.new(financer_params)\n @financer.save!\n saved = true\n end\n rescue ActiveRecord::RecordInvalid\n end\n\n respond_to do |format|\n if saved\n format.html { redirect_to @financer, notice: 'Financer was successfully created.' }\n format.json { render :show, status: :created, location: @financer }\n else\n format.html { render :new }\n format.json { render json: @financer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @facturaventum = Facturaventum.new(facturaventum_params)\n\n respond_to do |format|\n if @facturaventum.save\n format.html { redirect_to @facturaventum, notice: 'Facturaventum was successfully created.' }\n format.json { render :show, status: :created, location: @facturaventum }\n else\n format.html { render :new }\n format.json { render json: @facturaventum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @desafio = Desafio.new(desafio_params)\n\n respond_to do |format|\n if @desafio.save\n format.html { redirect_to @desafio, notice: 'Desafio was successfully created.' }\n format.json { render :show, status: :created, location: @desafio }\n else\n format.html { render :new }\n format.json { render json: @desafio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fish = Fish.new(fish_params)\n\n respond_to do |format|\n if @fish.save\n format.html { redirect_to @fish, notice: \"Fish was successfully created.\" }\n format.json { render :show, status: :created, location: @fish }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @fish.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fase = Fase.new(fase_params)\n\n respond_to do |format|\n if @fase.save\n format.html { redirect_to @fase, notice: 'Fase was successfully created.' }\n format.json { render :show, status: :created, location: @fase }\n else\n format.html { render :new }\n format.json { render json: @fase.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @diet = Diet.new(diet_params)\n @diet.user = @current_user\n\n if @diet.save\n render json: @diet, status: 201, location: @diet, root: true\n else\n render json: @diet.errors, status: 422\n end\n end",
"def create\n @foiltype = Foiltype.new(params[:foiltype])\n\n respond_to do |format|\n if @foiltype.save\n format.html { redirect_to @foiltype, notice: 'Foil Type was successfully created.' }\n format.json { render json: @foiltype, status: :created, location: @foiltype }\n else\n format.html { render action: \"new\" }\n format.json { render json: @foiltype.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @finance = Finance.new(params[:finance])\n\n respond_to do |format|\n if @finance.save\n format.html { redirect_to @finance, notice: 'Finance was successfully created.' }\n format.json { render json: @finance, status: :created, location: @finance }\n else\n format.html { render action: \"new\" }\n format.json { render json: @finance.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @nature_financing = NatureFinancing.new(nature_financing_params)\n @nature_financing.institute = current_institute\n\n respond_to do |format|\n if @nature_financing.save\n format.html { redirect_to @nature_financing, notice: 'Nature financing was successfully created.' }\n format.json { render :show, status: :created, location: @nature_financing }\n else\n format.html { render :new }\n format.json { render json: @nature_financing.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @enfermedade = Enfermedade.new(enfermedade_params)\n\n respond_to do |format|\n if @enfermedade.save\n format.html { redirect_to new_padecimiento_path, notice: 'Ahora, por favor dinos a qué eres alérgico' }\n\n else\n format.html { render action: 'new' }\n format.json { render json: @enfermedade.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n param = furefure_params_for_create\n\n @user = User.find_by( uuid: param[\"user_uuid\"] );\n @channel = Channel.find_by( uuid: param[\"channel_uuid\"] );\n\n p @user;\n p @channel\n\n @furefure = Furefure.new( { user: @user, channel: @channel, at_time_sec: param[:at_time_sec] } )\n\n if @furefure.save\n render json: nil\n else\n render json: @furefure.errors, status: :unprocessable_entity\n end\n end",
"def create\n @farmer = Farmer.new(params[:farmer])\n\n respond_to do |format|\n if @farmer.save\n format.html { redirect_to @farmer, notice: 'Farmer was successfully created.' }\n format.json { render json: @farmer, status: :created, location: @farmer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @farmer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @flower = current_user.flowers.build(flower_params)\n\n respond_to do |format|\n if @flower.save\n format.html { redirect_to @flower, notice: 'Flower was successfully created.' }\n format.json { render :show, status: :created, location: @flower }\n else\n format.html { render :new }\n format.json { render json: @flower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fish = Fish.new(fish_params)\n\n respond_to do |format|\n if @fish.save\n format.html { redirect_to @fish, notice: 'Fish was successfully created.' }\n format.json { render :show, status: :created, location: @fish }\n else\n format.html { render :new }\n format.json { render json: @fish.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fish = Fish.new(fish_params)\n\n respond_to do |format|\n if @fish.save\n format.html { redirect_to @fish, notice: 'Fish was successfully created.' }\n format.json { render :show, status: :created, location: @fish }\n else\n format.html { render :new }\n format.json { render json: @fish.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @pago_factura = PagoFactura.new(pago_factura_params)\n\n respond_to do |format|\n if @pago_factura.save\n format.html { redirect_to @pago_factura, notice: 'Pago factura creado con éxito.' }\n format.json { render :show, status: :created, location: @pago_factura }\n else\n format.html { render :new }\n format.json { render json: @pago_factura.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @departamento = Departamento.new(departamento_params)\n\n if @departamento.save\n render json: @departamento, status: :created, location: @departamento\n else\n render json: @departamento.errors, status: :unprocessable_entity\n end\n end",
"def create\n @visitation = Visitation.new(visitation_params)\n\n respond_to do |format|\n if @visitation.save\n format.html { redirect_to @visitation, notice: 'Visitation was successfully created.' }\n format.json { render :show, status: :created, location: @visitation }\n else\n format.html { render :new }\n format.json { render json: @visitation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fishing_method = FishingMethod.new(params[:fishing_method])\n\n respond_to do |format|\n if @fishing_method.save\n format.html { redirect_to @fishing_method, notice: 'Fishing method was successfully created.' }\n format.json { render json: @fishing_method, status: :created, location: @fishing_method }\n else\n format.html { render action: \"new\" }\n format.json { render json: @fishing_method.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.74402696",
"0.7076273",
"0.68256736",
"0.6690302",
"0.6678855",
"0.6668468",
"0.6616737",
"0.64184356",
"0.63953596",
"0.6390258",
"0.6383752",
"0.6368777",
"0.63076323",
"0.62814444",
"0.6252537",
"0.62483674",
"0.622775",
"0.6221251",
"0.62183034",
"0.6193696",
"0.6188447",
"0.6163762",
"0.61597914",
"0.6133175",
"0.6124804",
"0.61113375",
"0.61080396",
"0.60996324",
"0.608548",
"0.6081418",
"0.60794127",
"0.60738456",
"0.6073291",
"0.6063827",
"0.60589534",
"0.6045243",
"0.60408473",
"0.60405904",
"0.6031117",
"0.60221714",
"0.60207945",
"0.6015827",
"0.6015827",
"0.59969777",
"0.59882265",
"0.5978094",
"0.5968735",
"0.5956132",
"0.5948164",
"0.59305936",
"0.59266186",
"0.59264725",
"0.59263986",
"0.59227294",
"0.5913618",
"0.59074706",
"0.58938915",
"0.58932483",
"0.5887792",
"0.5879815",
"0.5878301",
"0.5873712",
"0.58723086",
"0.58652973",
"0.58613735",
"0.58603054",
"0.58599854",
"0.58492416",
"0.58492416",
"0.5843746",
"0.58418596",
"0.58384335",
"0.5837464",
"0.5830818",
"0.5829074",
"0.58259594",
"0.58259517",
"0.58259517",
"0.5822284",
"0.581029",
"0.5805916",
"0.58018667",
"0.58015066",
"0.5800921",
"0.5796912",
"0.57961804",
"0.5793828",
"0.5792162",
"0.5789902",
"0.57886195",
"0.5787525",
"0.57864326",
"0.57771176",
"0.577092",
"0.5770727",
"0.5770727",
"0.57706326",
"0.5766552",
"0.57634395",
"0.57616997"
] | 0.6240462 | 16 |
PATCH/PUT /franchises/1 PATCH/PUT /franchises/1.json | def update
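    # If the franchise already has an address, update it with any submitted address params;
    # otherwise create a new Address record and attach it to the franchise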
if @franchise.address
@franchise.address.update(address_params) unless address_params.empty?
else
@address = Address.create(address_params)
@franchise.address_id = @address.id
@franchise.save
end
respond_to do |format|
if @franchise.update(franchise_params)
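        # Track 'Note Created' / 'File Uploaded' events for any nested notes or uploads included in the update params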
if franchise_params[:notes_attributes]
@note = @franchise.notes.last
@franchise.track 'Note Created', current_user, @note
end
if franchise_params[:uploads_attributes]
@upload = @franchise.uploads.last
@franchise.track 'File Uploaded', current_user, @upload
end
format.html { redirect_to @franchise, notice: 'Franchise was successfully updated.' }
format.json { render :show, status: :ok, location: @franchise }
else
format.html { render :edit }
format.json { render json: @franchise.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update \n @franchise = @franchise_set.franchises.find(params[:id])\n\n respond_to do |format|\n franchise_set = FranchiseSet.find(params[:franchise_set_id])\n if @franchise.update_attributes(params[:franchise])\n format.html { redirect_to \"/franchise_sets/#{franchise_set.id}/edit\" , notice: 'Franchise was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @franchise.update(franchise_params)\n format.html { redirect_to [@client, @location, @franchise], notice: 'Franchise was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @franchise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @frat.update(frat_params)\n format.html { redirect_to @frat, notice: 'Frat was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @frat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update \n @franchise_set = FranchiseSet.find(params[:id])\n\n respond_to do |format|\n if @franchise_set.update_attributes(params[:franchise_set])\n format.html { redirect_to \"/franchise_sets/\", notice: 'Franchise set was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @franchise_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n @foaf.interests.clear\n\n if(foaf_params.has_key?(:interests_attributes))\n interest_ids = foaf_params[:interests_attributes].split(\",\").map { |s| s.to_i }\n interest_ids.each do |i|\n @foaf.interests << Interest.find(i)\n #@foaf.update(Interest.find(i))\n end\n \n end\n\n respond_to do |format|\n if @foaf.update(name: foaf_params[:name], work: foaf_params[:work], \n slug: foaf_params[:slug], birthday: foaf_params[:birthday])\n format.html { redirect_to @foaf, notice: 'FOAF was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @foaf.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @fridge = Fridge.find(params[:id])\n\n respond_to do |format|\n if @fridge.update_attributes(params[:fridge])\n format.html { redirect_to @fridge, notice: 'Fridge was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fridge.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @spoofer = Spoofer.find(params[:id])\n\n respond_to do |format|\n if @spoofer.update_attributes(params[:spoofer])\n format.html { redirect_to @spoofer, notice: 'Spoofer was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @spoofer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n @foaf.interests.clear\n\n if(foaf_params.has_key?(:interests_attributes))\n interest_ids = foaf_params[:interests_attributes].split(\",\").map { |s| s.to_i }\n interest_ids.each do |i|\n @foaf.interests << Interest.find(i)\n #@foaf.update(Interest.find(i))\n end\n \n end\n\n respond_to do |format|\n if @foaf.update(name: foaf_params[:name], work: foaf_params[:work], \n slug: foaf_params[:slug], birthday: foaf_params[:birthday])\n format.html { redirect_to @foaf, notice: 'Foaf was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @foaf.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @franchise.update_attributes(params[:franchise])\n flash[:notice] = 'Franchise was successfully updated.'\n format.html { redirect_to(@franchise) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @franchise.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end",
"def update\n @fucker = Fucker.find(params[:id])\n\n respond_to do |format|\n if @fucker.update_attributes(params[:fucker])\n format.json { head :no_content }\n else\n format.json { render json: @fucker.errors, status: :internal_server_error }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fabrica.update(fabrica_params)\n format.html { redirect_to @fabrica, notice: 'Fabrica was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @fabrica.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch!\n request! :patch\n end",
"def update\n respond_to do |format|\n if @frind.update(frind_params)\n format.html { redirect_to @frind, notice: 'Frind was successfully updated.' }\n format.json { render :show, status: :ok, location: @frind }\n else\n format.html { render :edit }\n format.json { render json: @frind.errors, status: :unprocessable_entity }\n end\n end\n end",
"def actualizacion \n fiesta.update (params[:id]) \n render json: fiesta\n end",
"def update\n respond_to do |format|\n if @franchisee.update(franchisee_params)\n @franchisee.parent = params[:parent] if params[:parent] \n @franchisee.update(parent: @franchisee.parent)\n @franchisee.update(no_of_owners: FranchiseeOwner.where(franchisee_id: @franchisee.id).count, latest_renewal: @franchisee.franchisee_agreement.renewal_date)\n @franchisee.franchisee_agreement.update(no_of_installment: InstallmentDetail.where(franchisee_id: @franchisee.id).count.to_i)\n format.html { redirect_to franchisees_url, notice: 'Franchisee was successfully updated.' }\n format.json { render :show, status: :ok, location: @franchisee }\n else\n format.html { render :edit }\n format.json { render json: @franchisee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch(path, params)\n time(\"PATCH #{path}\") { Cloudflarer.new.patch(path, params) }\n end",
"def update\n @fred = Fred.find(params[:id])\n\n respond_to do |format|\n if @fred.update_attributes(params[:fred])\n format.html { redirect_to @fred, notice: 'Fred was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fred.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fabricsofaset.update(fabricsofaset_params)\n format.html { redirect_to @fabricsofaset, notice: 'Fabricsofaset was successfully updated.' }\n format.json { render :show, status: :ok, location: @fabricsofaset }\n else\n format.html { render :edit }\n format.json { render json: @fabricsofaset.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fisier.update(fisier_params)\n format.html { redirect_to root_path, notice: 'Fisier was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @fisier.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @frete = Frete.find(params[:id])\n\n respond_to do |format|\n if @frete.update_attributes(params[:frete])\n format.html { redirect_to @frete, notice: 'Frete was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @frete.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch\n headers = {\"If-Match\" => @version}\n response = @context.request :patch, \"#{@path}/#{@id}\", @data.to_json, headers\n @version += 1\n response\n # 'X-HTTP-Method-Override' => 'PATCH'\n end",
"def update\n respond_to do |format|\n if @cof.update(cof_params)\n format.html { redirect_to :back }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @cof.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update # PATCH\n raise NotImplementedError\n end",
"def update!(params)\n res = @client.put(path, nil, params, \"Content-Type\" => \"application/json\")\n @attributes = res.json if res.status == 201\n res\n end",
"def update\n @festival = Festival.find(params[:id])\n\n respond_to do |format|\n if @festival.update_attributes(params[:festival])\n format.html { redirect_to @festival, notice: 'Festival was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @festival.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @france.update(france_params)\n format.html { redirect_to @france, notice: 'France was successfully updated.' }\n format.json { render :show, status: :ok, location: @france }\n else\n format.html { render :edit }\n format.json { render json: @france.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @golfer = Golfer.find(params[:id])\n\n respond_to do |format|\n if @golfer.update_attributes(params[:golfer])\n format.html { redirect_to @golfer, notice: 'Golfer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @golfer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fascicle.update(fascicle_params)\n format.html { redirect_to @fascicle, notice: 'Fascicle was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @fascicle.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n puts \"update #{@feeling.as_json} #{updated_params.as_json}\"\n respond_to do |format|\n if @feeling.update(updated_params)\n puts \"brucep update success\"\n #format.html { redirect_to @feeling, notice: 'Feeling was successfully updated.' }\n format.html { redirect_to new_feeling_path }\n format.json { render :show, status: :ok, location: @feeling }\n #format.js\n else\n format.html { render :edit }\n format.json { render json: @feeling.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @flight = Flight.find(params[:id])\n\n if @flight.update(params[:flight])\n head :no_content\n else\n render json: @flight.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @falta.update(falta_params)\n format.html { redirect_to @falta, notice: \"Falta was successfully updated.\" }\n format.json { render :show, status: :ok, location: @falta }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @falta.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @agency = Agency.find(params[:id])\n\n if @agency.update(agency_params)\n #head :no_content\n render json: @agency, status: :accepted, location: @agency #sera? status accepted? \n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end",
"def update\n @flower = Flower.find(params[:id])\n\n respond_to do |format|\n if @flower.update_attributes(params[:flower])\n format.html { redirect_to @flower, notice: 'Flower was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @flower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fish_poly.update(fish_poly_params)\n format.html { redirect_to @fish_poly, notice: 'Fish poly was successfully updated.' }\n format.json { render :show, status: :ok, location: @fish_poly }\n else\n format.html { render :edit }\n format.json { render json: @fish_poly.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @factores_fluctuante.update(factores_fluctuante_params)\n format.html { redirect_to @factores_fluctuante, notice: 'Factores fluctuante was successfully updated.' }\n format.json { render :show, status: :ok, location: @factores_fluctuante }\n else\n format.html { render :edit }\n format.json { render json: @factores_fluctuante.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @forest.update(forest_params)\n format.html { redirect_to @forest, notice: 'Forest was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @forest.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @fabric = Fabric.find(params[:id])\n\n respond_to do |format|\n if @fabric.update_attributes(params[:fabric])\n format.html { redirect_to @fabric, notice: 'Fabric was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fabric.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @f = F.find(params[:id])\n\n respond_to do |format|\n if @f.update_attributes(params[:f])\n format.html { redirect_to software_package_fs_path(@software, @package) }\n format.js\n format.json { head :ok }\n else\n format.html { redirect_to software_package_fs_path(@software, @package) }\n format.js\n format.json { render json: @package.errors, status: :unprocessable_entity }\n end\n end\n \n end",
"def update\n respond_to do |format|\n @family.slug=nil\n if @family.update(family_params)\n format.html { redirect_to @family, notice: 'La familia fue actualizada exitosamente.' }\n format.json { render :show, status: :ok, location: @family }\n else\n format.html { render :edit }\n format.json { render json: @family.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @featuring.update(featuring_params)\n format.html { redirect_to @featuring, notice: 'Featuring was successfully updated.' }\n format.json { render :show, status: :ok, location: @featuring }\n else\n format.html { render :edit }\n format.json { render json: @featuring.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fondo.update(fondo_params)\n format.html { redirect_to @fondo, notice: 'Fondo was successfully updated.' }\n format.json { render :show, status: :ok, location: @fondo }\n else\n format.html { render :edit }\n format.json { render json: @fondo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fiestum.update(fiestum_params)\n format.html { redirect_to @fiestum, notice: 'Fiestum was successfully updated.' }\n format.json { render :show, status: :ok, location: @fiestum }\n else\n format.html { render :edit }\n format.json { render json: @fiestum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if request.content_type == \"application/json\"\n # .update is like a \"update people set ...\" in sql\n if @person.update(person_params)\n render json: @person\n else\n render json: @person.errors, status: :not_found\n end\n else\n render status: :bad_request\n end\n end",
"def update_plant\n @plant.deleted = false\n\n respond_to do |format|\n if set_attributes_from_filemaker(@plant)\n format.json do\n render status: :created,\n json: {\n id: @plant.id,\n botanical_name: @plant.botanical_name,\n alternative_names: @plant.alternative_names,\n updated_at: @plant.updated_at,\n visible: (!@plant.deleted).to_s\n }\n end\n else\n format.json do\n render json: @plant.errors, status: :unprocessable_entity\n end\n end\n end\n end",
"def update\n respond_to do |format|\n if @add_fuel.update(add_fuel_params)\n format.html { redirect_to @add_fuel, notice: (t 'add_fuels.title')+(t 'actions.updated') }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @add_fuel.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @forest = Forest.find(params[:id])\n\n respond_to do |format|\n if @forest.update_attributes(params[:forest])\n format.html { redirect_to @forest, notice: 'Forest was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @forest.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update(url, data)\n RestClient.put url, data, :content_type => :json\nend",
"def update\n respond_to do |format|\n if @formulary.update(formulary_params)\n format.html { redirect_to formularies_url, alert: I18n.t('activerecord.models.formulary') + I18n.t('helpers_locale.models.updated') }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @formulary.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n render json: Company.update(params[\"id\"], params[\"company\"])\n end",
"def patch options\n rest_request({ method: :patch }.merge(options))\n end",
"def patch options\n rest_request({ method: :patch }.merge(options))\n end",
"def update\n respond_to do |format|\n if @rest.update(rest_params)\n format.html { redirect_to @rest, notice: 'Rest was successfully updated.' }\n format.json { render :show, status: :ok, location: @rest }\n else\n format.html { render :edit }\n format.json { render json: @rest.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @fortune = Fortune.find(params[:id])\n\n respond_to do |format|\n if @fortune.update_attributes(params[:fortune])\n format.html { redirect_to @fortune, notice: 'Piosenka pozytywnie zaktualizowana.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fortune.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @feira.update(feira_params)\n format.html { redirect_to @feira, notice: 'Feira was successfully updated.' }\n format.json { render :show, status: :ok, location: @feira }\n else\n format.html { render :edit }\n format.json { render json: @feira.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @fundraiser = Fundraiser.find(params[:id])\n\n respond_to do |format|\n if @fundraiser.update_attributes(params[:fundraiser])\n format.html { redirect_to @fundraiser, notice: 'Fundraiser was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fundraiser.errors, status: :unprocessable_entity }\n end\n end\n end",
"def rest_patch(base_uri,json_payload,params)\n begin\n @response = RestClient.patch(base_uri,json_payload,params)\n rescue => e\n puts @response.code\n end\n return @response\n end",
"def update\n respond_to do |format|\n if @fish.update(fish_params)\n format.html { redirect_to @fish, notice: 'Fish was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @fish.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @frosting_flavor.update(frosting_flavor_params)\n format.html { redirect_to @frosting_flavor, notice: 'Frosting flavor was successfully updated.' }\n format.json { render :show, status: :ok, location: @frosting_flavor }\n else\n format.html { render :edit }\n format.json { render json: @frosting_flavor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @facture.update(facture_params)\n format.html { redirect_to add_ref_path(@facture), notice: 'La facture a bien été mise à jour.' }\n format.json { render :show, status: :ok, location: @facture }\n else\n format.html { render :edit }\n format.json { render json: @facture.errors, status: :unprocessable_entity }\n end\n end\n end",
"def rest_edit(path, options={}, &blk)\n callback = Proc.new { |*args|\n @object = yield(*args) or pass\n rest_params.each { |k, v| @object.send :\"#{k}=\", v unless k == 'id' }\n\n return 400, @object.errors.to_json unless @object.valid?\n\n @object.save\n rest_respond @object\n }\n\n # Make it work with `Backbone.emulateHTTP` on.\n put path, &callback\n post path, &callback\n end",
"def rest_edit(path, options={}, &blk)\n callback = Proc.new { |*args|\n @object = yield(*args) or pass\n rest_params.each { |k, v| @object.send :\"#{k}=\", v unless k == 'id' }\n\n return 400, @object.errors.to_json unless @object.valid?\n\n @object.save\n rest_respond @object\n }\n\n # Make it work with `Backbone.emulateHTTP` on.\n put path, &callback\n post path, &callback\n end",
"def update\n @fulcliente = Fulcliente.find(params[:id])\n\n respond_to do |format|\n if @fulcliente.update_attributes(params[:fulcliente])\n format.html { redirect_to @fulcliente, notice: 'Fulcliente was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fulcliente.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @flower.update(flower_params)\n format.html { redirect_to @flower, notice: 'Flower was successfully updated.' }\n format.json { render :show, status: :ok, location: @flower }\n else\n format.html { render :edit }\n format.json { render json: @flower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @flower.update(flower_params)\n format.html { redirect_to @flower, notice: 'Flower was successfully updated.' }\n format.json { render :show, status: :ok, location: @flower }\n else\n format.html { render :edit }\n format.json { render json: @flower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @festival.update(festival_params)\n format.html { redirect_to @festival, notice: 'Festival was successfully updated.' }\n format.json { render :show, status: :ok, location: @festival }\n else\n format.html { render :edit }\n format.json { render json: @festival.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @familium = Familium.find(params[:id])\n\n respond_to do |format|\n if @familium.update_attributes(params[:familium])\n format.html { redirect_to @familium, notice: 'Familia fue actualizada existosamente.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @familium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @feild.update(feild_params)\n format.html { redirect_to @feild, notice: 'Feild was successfully updated.' }\n format.json { render :show, status: :ok, location: @feild }\n else\n format.html { render :edit }\n format.json { render json: @feild.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fat.update(fat_params)\n format.html { redirect_to @fat, notice: 'Fat was successfully updated.' }\n format.json { render :show, status: :ok, location: @fat }\n else\n format.html { render :edit }\n format.json { render json: @fat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @person = Person.find(params[:id]) \n respond_to do |format|\n if @person.update(person_params)\n format.json { render json: @person, status: :ok }\n else\n format.json { render json: @person.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @farmer.update(farmer_params)\n format.html { redirect_to @farmer, notice: 'Farmer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @farmer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @fee = Fee.find(params[:id])\n\n respond_to do |format|\n if @fee.update_attributes(params[:fee])\n format.html { redirect_to fees_path, notice: 'Fee was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @json.update(json_params)\n format.html { redirect_to @json, notice: 'Json was successfully updated.' }\n format.json { render :show, status: :ok, location: @json }\n else\n format.html { render :edit }\n format.json { render json: @json.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @json.update(json_params)\n format.html { redirect_to @json, notice: 'Json was successfully updated.' }\n format.json { render :show, status: :ok, location: @json }\n else\n format.html { render :edit }\n format.json { render json: @json.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @franja.update(franja_params)\n format.html { redirect_to @franja, notice: 'La Franja se ha editado correctamente.' }\n format.json { render :show, status: :ok, location: @franja }\n else\n format.html { render :edit }\n format.json { render json: @franja.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @family.update(family_params)\n format.html { redirect_to @family, notice: 'Family was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @family.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch(path, params = {})\n request(:patch, path, params)\n end",
"def patch(path, params = {})\n request(:patch, path, params)\n end",
"def update\n respond_to do |format|\n if @feature_request.update(feature_request_params)\n format.html { redirect_to @feature_request, notice: 'Feature request was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @feature_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n #Finding the specific chore where the id matches the one we pass in with the body\n @v1_chore = Chore.where(id: params[:id]).first\n #Here we're checking if we have user_id in our body, and if we do, we'll change the selected chore's properties\n #with the parameters of the body, we go through the specific group to our specific chore with the path\n if v1_chore_params[:user_id]\n @v1_chore.user_id = params[:user_id]\n @v1_chore.assigned = true\n if @v1_chore.save\n render :show, status: :ok\n end\n else\n render json: @v1_chore.errors, status: :unprocessable_entity\n end\n end",
"def update\n @contractor_feature = ContractorFeature.find(params[:id])\n\n respond_to do |format|\n if @contractor_feature.update_attributes(params[:contractor_feature])\n format.html { redirect_to @contractor_feature, notice: 'Contractor feature was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @contractor_feature.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch(path, data)\n request 'PATCH', path, body: data.to_json\n end",
"def update\n @farmer = Farmer.find(params[:id])\n\n respond_to do |format|\n if @farmer.update_attributes(params[:farmer])\n format.html { redirect_to @farmer, notice: 'Farmer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @farmer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n # { clinic: {id: references, \"license_id\"=>nil, \"name\"=>string } }\n \n if @clinic.update_attributes(params[:clinic].except(:api_license_id))\n head :no_content\n else\n render json: clinic.errors.full_messages, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @fax.update(fax_params)\n format.html { redirect_to @fax, notice: 'Fax was successfully updated.' }\n format.json { render :show, status: :ok, location: @fax }\n else\n format.html { render :edit }\n format.json { render json: @fax.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fact.update(fact_params)\n format.html { redirect_to :back }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @fact.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @freind.update(freind_params)\n format.html { redirect_to @freind, notice: \"Freind was successfully updated.\" }\n format.json { render :show, status: :ok, location: @freind }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @freind.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @dev_folio = DevFolio.find(params[:id])\n\n respond_to do |format|\n if @dev_folio.update_attributes(params[:dev_folio])\n format.html { redirect_to @dev_folio, notice: 'Dev folio was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @dev_folio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @dependence.update(dependence_params)\n format.html { redirect_to @dependence, notice: 'Dependence was successfully updated.' }\n format.json { render :show, status: :ok, location: @dependence }\n else\n format.html { render :edit }\n format.json { render json: @dependence.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fee.update(fee_params)\n format.html { redirect_to @fee, notice: 'Fee was successfully updated.' }\n format.json { render :show, status: :ok, location: @fee }\n else\n format.html { render :edit }\n format.json { render json: @fee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fee.update(fee_params)\n format.html { redirect_to @fee, notice: 'Fee was successfully updated.' }\n format.json { render :show, status: :ok, location: @fee }\n else\n format.html { render :edit }\n format.json { render json: @fee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def update\n respond_to do |format|\n if @tenant_fee.update(tenant_fee_params)\n format.html { redirect_to @tenant_fee, notice: 'Tenant fee was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :edit }\n format.json { render json: @tenant_fee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @foil = Foil.find(params[:id])\n\n respond_to do |format|\n if @foil.update_attributes(params[:foil])\n format.html { redirect_to @foil, notice: 'Foil was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @foil.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @foam = Foam.find(params[:id])\n\n respond_to do |format|\n if @foam.update_attributes(params[:foam])\n format.html { redirect_to @foam, notice: 'Foam was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @foam.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @flavium.update(flavium_params)\n format.html { redirect_to @flavium, notice: 'Flavium was successfully updated.' }\n format.json { render :show, status: :ok, location: @flavium }\n else\n format.html { render :edit }\n format.json { render json: @flavium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @familium.update(familium_params)\n format.html { redirect_to @familium, notice: 'Familium was successfully updated.' }\n format.json { render :show, status: :ok, location: @familium }\n else\n format.html { render :edit }\n format.json { render json: @familium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch(url, payload, headers={})\n RestClient.patch url, payload, headers\n end",
"def update\n @frais_hebergement = FraisHebergement.find(params[:id])\n\n respond_to do |format|\n if @frais_hebergement.update_attributes(params[:frais_hebergement])\n format.html { redirect_to @frais_hebergement, :notice => 'Le frais d\\'hébergement a bien été modifié' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @frais_hebergement.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @agency = Agency.find(params[:id])\n\n if @agency.update(agency_params)\n #head :no_content\n render json: @agency, status: :accepted, location: @agency #sera? status accepted? \n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end"
] | [
"0.6700158",
"0.659464",
"0.6402481",
"0.6275365",
"0.6201156",
"0.6197571",
"0.6190996",
"0.61566097",
"0.61088634",
"0.61082864",
"0.60971856",
"0.6046711",
"0.60307807",
"0.60184574",
"0.5999068",
"0.5981367",
"0.5966143",
"0.5945144",
"0.5945061",
"0.59390193",
"0.5935612",
"0.592092",
"0.5917422",
"0.59170437",
"0.5906354",
"0.59044015",
"0.59010893",
"0.58915985",
"0.5882655",
"0.58724666",
"0.5867616",
"0.5863754",
"0.58509254",
"0.58418924",
"0.58401746",
"0.58231026",
"0.5818755",
"0.5816873",
"0.58168596",
"0.58154833",
"0.58143544",
"0.5810312",
"0.58079267",
"0.57949",
"0.5794515",
"0.5794072",
"0.5787686",
"0.57870513",
"0.578177",
"0.5781608",
"0.5775943",
"0.5775943",
"0.576934",
"0.5768921",
"0.57626",
"0.5762346",
"0.5762152",
"0.57618445",
"0.5758835",
"0.5758438",
"0.57545257",
"0.57545257",
"0.5742122",
"0.5741204",
"0.5741204",
"0.574049",
"0.57372063",
"0.57325435",
"0.5730752",
"0.57295966",
"0.57281816",
"0.5722056",
"0.5720494",
"0.5720494",
"0.571977",
"0.5716198",
"0.5712289",
"0.5712289",
"0.57120585",
"0.5711751",
"0.5710649",
"0.5709596",
"0.5708101",
"0.57064706",
"0.57036895",
"0.5703673",
"0.5693744",
"0.5692301",
"0.56885743",
"0.56839657",
"0.56839657",
"0.5679052",
"0.5677723",
"0.5670445",
"0.5665672",
"0.5662751",
"0.56612325",
"0.5660638",
"0.56533664",
"0.5649846"
] | 0.5700533 | 86 |
DELETE /franchises/1 DELETE /franchises/1.json | def destroy
@franchise.destroy
respond_to do |format|
format.html { redirect_to franchises_url, notice: 'Franchise was successfully destroyed.' }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n franchise.destroy\n\n respond_to do |format|\n format.html { redirect_to franchises_url }\n format.json { head :no_content }\n end\n end",
"def destroy \n @franchise = @franchise_set.franchises.find(params[:id])\n @franchise.destroy\n\n # respond_to do |format| \n # format.html { redirect_to \"/franchise_sets/#{franchise_set.id}/edit\" }\n # format.json { head :ok }\n # end\n \n respond_to do |format|\n format.json {head :ok}\n end\n \n end",
"def destroy\n @franchise.destroy\n respond_to do |format|\n format.html { redirect_to client_location_franchises_path(@client, @location) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @franchise.destroy\n\n respond_to do |format|\n format.html { redirect_to(franchises_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @frat.destroy\n respond_to do |format|\n format.html { redirect_to frats_url }\n format.json { head :no_content }\n end\n end",
"def destroy \n @franchise_set = FranchiseSet.find(params[:id])\n @franchise_set.destroy\n\n respond_to do |format|\n format.html { redirect_to franchise_sets_url }\n format.json { head :ok }\n end\n end",
"def delete\n render :json => @fiestas.delete_at(params[:id].to_i)\n end",
"def delete\n client.delete(\"/#{id}\")\n end",
"def destroy\n @forest = Forest.find(params[:id])\n @forest.destroy\n\n respond_to do |format|\n format.html { redirect_to forests_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fridge = Fridge.find(params[:id])\n @fridge.destroy\n\n respond_to do |format|\n format.html { redirect_to fridges_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @forest.destroy\n respond_to do |format|\n format.html { redirect_to forests_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @familium = Familium.find(params[:id])\n @familium.destroy\n\n respond_to do |format|\n format.html { redirect_to familia_url }\n format.json { head :no_content }\n end\n end",
"def deletef\n url = prefix + \"deletef\" + id_param\n return response(url)\n end",
"def destroy\n @frind.destroy\n respond_to do |format|\n format.html { redirect_to frinds_url, notice: 'Frind was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete\n render json: Alien.delete(params[\"id\"])\n end",
"def destroy\n @festival = Festival.find(params[:id])\n @festival.destroy\n\n respond_to do |format|\n format.html { redirect_to festivals_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @franchisee_royalty.destroy\n respond_to do |format|\n format.html { redirect_to franchisee_royalties_url, notice: 'Franchisee royalty was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete(path)\n RestClient.delete request_base+path\n end",
"def destroy\n @flower = Flower.find(params[:id])\n @flower.destroy\n\n respond_to do |format|\n format.html { redirect_to flowers_url }\n format.json { head :ok }\n end\n end",
"def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def destroy\n @fascicle.destroy\n respond_to do |format|\n format.html { redirect_to fascicles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fabrica.destroy\n respond_to do |format|\n format.html { redirect_to fabricas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fisier.destroy\n respond_to do |format|\n format.html { redirect_to root_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n @basis = Base.find(params[:id])\n @basis.destroy\n\n respond_to do |format|\n format.html { redirect_to bases_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fabric = Fabric.find(params[:id])\n @fabric.destroy\n\n respond_to do |format|\n format.html { redirect_to fabrics_url }\n format.json { head :no_content }\n end\n end",
"def delete path\n make_request(path, \"delete\", {})\n end",
"def destroy\n @four.destroy\n respond_to do |format|\n format.html { redirect_to fours_url }\n format.json { head :no_content }\n end\n end",
"def delete_json(path)\n url = [base_url, path].join\n resp = HTTParty.delete(url, headers: standard_headers)\n parse_json(url, resp)\n end",
"def destroy\n @frete = Frete.find(params[:id])\n @frete.destroy\n\n respond_to do |format|\n format.html { redirect_to fretes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @frais_hebergement = FraisHebergement.find(params[:id])\n @frais_hebergement.destroy\n\n respond_to do |format|\n format.html { redirect_to frais_hebergements_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fish.destroy\n respond_to do |format|\n format.html { redirect_to fish_index_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @trein_consul_comercial.destroy\n respond_to do |format|\n format.html { redirect_to trein_consul_comercials_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @farmacium.destroy\n respond_to do |format|\n msg = { :status => \"ok\", :message => \"Eliminado!\" }\n format.json { render :json => msg }\n end\n end",
"def destroy\n @france.destroy\n respond_to do |format|\n format.html { redirect_to frances_url, notice: 'France was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cafeterium.destroy\n respond_to do |format|\n format.html { redirect_to cafeteria_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fulcliente = Fulcliente.find(params[:id])\n @fulcliente.destroy\n\n respond_to do |format|\n format.html { redirect_to fulclientes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @foaf.destroy\n\n respond_to do |format|\n format.html { redirect_to foafs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @foaf.destroy\n\n respond_to do |format|\n format.html { redirect_to foafs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fiestum.destroy\n respond_to do |format|\n format.html { redirect_to fiesta_url, notice: 'Fiestum was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @one = One.find(params[:id])\n @one.destroy\n\n respond_to do |format|\n format.html { redirect_to ones_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @familium.destroy\n respond_to do |format|\n format.html { redirect_to familia_url, notice: 'Familium was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @furdo.destroy\n respond_to do |format|\n format.html { redirect_to furdos_url, notice: 'Furdo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @futbolada = Futbolada.find(params[:id])\n @futbolada.destroy\n\n respond_to do |format|\n format.html { redirect_to gestion_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fast.destroy\n respond_to do |format|\n format.html { redirect_to fasts_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asignatura.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @father = Father.find(params[:id])\n @father.destroy\n\n respond_to do |format|\n format.html { redirect_to fathers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @historial = Historial.find(params[:id])\n @historial.destroy\n\n respond_to do |format|\n format.html { redirect_to historials_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @fostered.destroy\n respond_to do |format|\n format.html { redirect_to fostereds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @chaine = Chaine.find(params[:id])\n @chaine.destroy\n\n respond_to do |format|\n format.html { redirect_to chaines_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n string = \"DELETE FROM notes WHERE famille_id = #{@famille.id}\"\n connection = Demande.connection\n connection.delete(string)\n @famille.destroy\n respond_to do |format|\n format.html { redirect_to familles_url, notice: 'Famille was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @flight.destroy\n respond_to do |format|\n format.html { redirect_to flights_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @flight.destroy\n respond_to do |format|\n format.html { redirect_to flights_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @flight.destroy\n respond_to do |format|\n format.html { redirect_to flights_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @deco1.destroy\n respond_to do |format|\n format.html { redirect_to deco1s_url, notice: 'Deco1 was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def borrar \n\n fiesta.destroy\n render json: fiesta \n end",
"def destroy\n @campus_food = CampusFood.find(params[:id])\n @campus_food.destroy\n\n respond_to do |format|\n format.html { redirect_to campus_foods_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @fundraiser = Fundraiser.find(params[:id])\n @fundraiser.destroy\n\n respond_to do |format|\n format.html { redirect_to fundraisers_url }\n format.json { head :no_content }\n end\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def destroy\n @factura = Factura.find(params[:factura_id])\n @renglon_factura = @factura.renglon_facturas.find(params[:id]).destroy\n\n respond_to do |format|\n format.html { head :ok }\n #format.json { head :ok }\n end\n end",
"def destroy\n @ref_sanatorium = Ref::Sanatorium.find(params[:id])\n @ref_sanatorium.destroy\n\n respond_to do |format|\n format.html { redirect_to ref_sanatoria_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @golfer = Golfer.find(params[:id])\n @golfer.destroy\n\n respond_to do |format|\n format.html { redirect_to golfers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fraternity.destroy\n respond_to do |format|\n format.html { redirect_to fraternities_url, notice: 'Fraternity was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @frais_annex = FraisAnnex.find(params[:id])\n @frais_annex.destroy\n\n respond_to do |format|\n format.html { redirect_to frais_annexes_url }\n format.json { head :no_content }\n end\n end",
"def delete\n render json: Post.delete(params[\"id\"])\n end",
"def destroy\n @referee = Referee.find(params[:id])\n @referee.destroy\n\n respond_to do |format|\n format.html { redirect_to referees_url }\n format.json { head :no_content }\n end\n end",
"def delete(id:)\n id_check(:id, id)\n\n cf_delete(path: \"/organizations/#{org_id}/railguns/#{id}\")\n end",
"def destroy\n @humanidades1 = Humanidades1.find(params[:id])\n @humanidades1.destroy\n\n respond_to do |format|\n format.html { redirect_to humanidades1s_url }\n format.json { head :no_content }\n end\n end",
"def delete\n render json: Company.delete(params[\"id\"])\n end",
"def destroy\n @depot_fuel.destroy\n respond_to do |format|\n format.html { redirect_to depot_fuels_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fabricsofaset.destroy\n respond_to do |format|\n format.html { redirect_to fabricsofasets_url, notice: 'Fabricsofaset was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gastronomium.destroy\n respond_to do |format|\n format.html { redirect_to gastronomia_url, notice: 'Gastronomium was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @visitante.destroy\n respond_to do |format|\n format.html { redirect_to visitantes_url, notice: 'Visitante was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @habitant.destroy\n respond_to do |format|\n format.html { redirect_to habitants_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resturant.destroy\n respond_to do |format|\n format.html { redirect_to resturants_url, notice: 'Resturant was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fourteen.destroy\n respond_to do |format|\n format.html { redirect_to fourteens_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bustour.destroy\n respond_to do |format|\n format.html { redirect_to bustours_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @frais_repa = FraisRepa.find(params[:id])\n @frais_repa.destroy\n\n respond_to do |format|\n format.html { redirect_to frais_repas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @food = Food.find(params[:id])\n @food.destroy\n\n respond_to do |format|\n format.html { redirect_to foods_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @food = Food.find(params[:id])\n @food.destroy\n\n respond_to do |format|\n format.html { redirect_to foods_url }\n format.json { head :ok }\n end\n end",
"def delete\n client.delete(url)\n @deleted = true\nend",
"def destroy\n @freind.destroy\n respond_to do |format|\n format.html { redirect_to freinds_url, notice: \"Freind was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @visit = Visit.find(params[:id])\n @visit.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @fondo.destroy\n respond_to do |format|\n format.html { redirect_to fondos_url, notice: 'Fondo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def test_del\n header 'Content-Type', 'application/json'\n\n data = File.read 'sample-traces/0.json'\n post('/traces', data, 'CONTENT_TYPE': 'application/json')\n\n id = last_response.body\n\n delete \"/traces/#{id}\"\n assert last_response.ok?\n\n get \"/traces/#{id}\"\n\n contents = JSON.parse last_response.body\n assert_kind_of(Hash, contents, 'Response contents is not a hash')\n assert contents.key? 'description'\n assert(!last_response.ok?)\n end",
"def destroy\n @foiltype = Foiltype.find(params[:id])\n @foiltype.destroy\n\n respond_to do |format|\n format.html { redirect_to foiltypes_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @flight = Flight.find(params[:id])\n @flight.destroy\n\n respond_to do |format|\n format.html { redirect_to flights_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @festival.destroy\n respond_to do |format|\n format.html { redirect_to festivals_url, notice: 'Festival was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @flat.destroy\n\n respond_to do |format|\n format.html { redirect_to flats_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @visit.destroy\n respond_to do |format|\n format.html { redirect_to visits_url, notice: 'Visita eliminada con exito' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @farmer.destroy\n respond_to do |format|\n format.html { redirect_to farmers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @franja.destroy\n respond_to do |format|\n format.html { redirect_to franjas_url, notice: 'La Franja se elimino correctamente.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @formulary = Formulary.find(params[:id])\n @formulary.destroy\n\n respond_to do |format|\n format.html { redirect_to formularies_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fridge_food = FridgeFood.find(params[:id])\n @fridge_food.destroy\n\n respond_to do |format|\n format.html { redirect_to(fridge_foods_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @dinosaur = Dinosaur.find(params[:id])\n @dinosaur.destroy\n\n respond_to do |format|\n format.html { redirect_to dinosaurs_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @familia = Familia.find(params[:id])\n @familia.destroy\n\n respond_to do |format|\n format.html { redirect_to(familias_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @furniture.destroy\n respond_to do |format|\n format.html { redirect_to furnitures_url, notice: 'Furniture Has Been Successfully Deleted.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @flight = Flight.find(params[:id])\n @flight.destroy\n\n respond_to do |format|\n format.html { redirect_to flights_url }\n format.json { head :ok }\n end\n end"
] | [
"0.7426003",
"0.73714197",
"0.7231688",
"0.710703",
"0.70505047",
"0.7008981",
"0.6990697",
"0.68669045",
"0.6827915",
"0.68034506",
"0.678156",
"0.6779242",
"0.6769297",
"0.67291707",
"0.6724265",
"0.67088115",
"0.67023784",
"0.6676823",
"0.66693914",
"0.6657469",
"0.6655576",
"0.6641624",
"0.6638372",
"0.6629816",
"0.6623946",
"0.66234094",
"0.6622756",
"0.66140306",
"0.6613304",
"0.6610717",
"0.6609027",
"0.66000134",
"0.6594813",
"0.6592881",
"0.65908486",
"0.65854067",
"0.65845275",
"0.65845275",
"0.6583418",
"0.65794206",
"0.65686977",
"0.65663004",
"0.6559939",
"0.65414447",
"0.6533835",
"0.6527094",
"0.6525592",
"0.65131253",
"0.65126777",
"0.6511278",
"0.65012306",
"0.65012306",
"0.65012306",
"0.6500681",
"0.650028",
"0.6493324",
"0.64833766",
"0.64831847",
"0.64831847",
"0.64831847",
"0.64831847",
"0.64823675",
"0.6477279",
"0.64703727",
"0.64679617",
"0.6467648",
"0.6466092",
"0.6465492",
"0.64647347",
"0.64635754",
"0.6462796",
"0.64622873",
"0.64590293",
"0.6456566",
"0.64550096",
"0.6454455",
"0.64508015",
"0.6450439",
"0.6446207",
"0.6444395",
"0.6443319",
"0.6443319",
"0.64428055",
"0.6440118",
"0.64389324",
"0.6438738",
"0.6437985",
"0.6437324",
"0.6436683",
"0.6436266",
"0.6436097",
"0.64354134",
"0.6431241",
"0.64289373",
"0.64280456",
"0.64227194",
"0.64187205",
"0.6418471",
"0.64181143",
"0.64162606"
] | 0.7141111 | 3 |
Use callbacks to share common setup or constraints between actions. | def set_franchise
@franchise = Franchise.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
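A minimal sketch of how the set_franchise callback above is typically registered so several actions share the same setup; the controller name and action list here are assumptions for illustration, only the callback body comes from the record:

class FranchisesController < ApplicationController
  # Share the record lookup across the member actions instead of repeating it.
  before_action :set_franchise, only: [:show, :edit, :update, :destroy]

  def show
    # @franchise is already loaded by the callback.
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_franchise
    @franchise = Franchise.find(params[:id])
  end
end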
Never trust parameters from the scary internet, only allow the white list through. | def franchise_params
params.require(:franchise).permit(
:name,
:franchise_number,
:legal_name,
:phone,
:fax,
:website,
:general_license,
:residential_license,
:commercial_license,
:mold_remediation_license,
:adwords_client_id,
uploads_attributes: [
:upload_category_id,
:description,
uploads: []
],
notes_attributes: [:content],
work_order_distribution_ids: [],
scheduling_manager_ids: []
)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n end",
"def param_whitelist\n [:role, :title]\n end",
"def expected_permitted_parameter_names; end",
"def safe_params\n params.except(:host, :port, :protocol).permit!\n end",
"def strong_params\n params.require(:team_member).permit(param_whitelist)\n end",
"def permitir_parametros\n \t\tparams.permit!\n \tend",
"def strong_params\n params.require(:community).permit(param_whitelist)\n end",
"def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end",
"def strong_params\n params.require(:education).permit(param_whitelist)\n end",
"def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end",
"def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end",
"def param_whitelist\n [:rating, :review]\n end",
"def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end",
"def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end",
"def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end",
"def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end",
"def valid_params_request?; end",
"def strong_params\n params.require(:experience).permit(param_whitelist)\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end",
"def allowed_params\n params.require(:allowed).permit(:email)\n end",
"def permitted_params\n []\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end",
"def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend",
"def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end",
"def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end",
"def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end",
"def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end",
"def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end",
"def safe_params\n params.require(:user).permit(:name)\n end",
"def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend",
"def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end",
"def check_params; true; end",
"def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end",
"def quote_params\n params.permit!\n end",
"def valid_params?; end",
"def paramunold_params\n params.require(:paramunold).permit!\n end",
"def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend",
"def filtered_parameters; end",
"def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end",
"def filtering_params\n params.permit(:email, :name)\n end",
"def check_params\n true\n end",
"def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend",
"def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend",
"def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end",
"def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end",
"def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end",
"def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend",
"def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end",
"def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end",
"def active_code_params\n params[:active_code].permit\n end",
"def filtering_params\n params.permit(:email)\n end",
"def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end",
"def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end",
"def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end",
"def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end",
"def post_params\n if current_user.admin? \n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end",
"def list_params\n params.permit(:name)\n end",
"def filter_parameters; end",
"def filter_parameters; end",
"def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end",
"def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end",
"def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end",
"def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end",
"def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end",
"def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end",
"def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end",
"def url_whitelist; end",
"def admin_social_network_params\n params.require(:social_network).permit!\n end",
"def filter_params\n params.require(:filters).permit(:letters)\n end",
"def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end",
"def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end",
"def sensitive_params=(params)\n @sensitive_params = params\n end",
"def permit_request_params\n params.permit(:address)\n end",
"def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end",
"def secure_params\n params.require(:location).permit(:name)\n end",
"def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end",
"def question_params\n params.require(:survey_question).permit(question_whitelist)\n end",
"def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end",
"def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end",
"def maintenance_request_params\n params[:maintenance_request].permit! #allow all parameters for now\n end",
"def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end",
"def url_params\n params[:url].permit(:full)\n end",
"def backend_user_params\n params.permit!\n end",
"def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend",
"def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end",
"def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end",
"def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end",
"def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end",
"def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end",
"def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end",
"def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end",
"def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end"
] | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",
"0.62894756",
"0.6283177",
"0.6242471",
"0.62382483",
"0.6217549",
"0.6214457",
"0.6209053",
"0.6193042",
"0.6177802",
"0.6174604",
"0.61714715",
"0.6161512",
"0.6151757",
"0.6150663",
"0.61461",
"0.61213595",
"0.611406",
"0.6106206",
"0.6105114",
"0.6089039",
"0.6081015",
"0.6071004",
"0.60620916",
"0.6019971",
"0.601788",
"0.6011056",
"0.6010898",
"0.6005122",
"0.6005122",
"0.6001556",
"0.6001049",
"0.59943926",
"0.5992201",
"0.59909594",
"0.5990628",
"0.5980841",
"0.59669393",
"0.59589154",
"0.5958826",
"0.5957911",
"0.5957385",
"0.5953072",
"0.59526145",
"0.5943361",
"0.59386164",
"0.59375334",
"0.59375334",
"0.5933856",
"0.59292704",
"0.59254247",
"0.5924164",
"0.59167904",
"0.59088355",
"0.5907542",
"0.59064597",
"0.5906243",
"0.5898226",
"0.589687",
"0.5896091",
"0.5894501",
"0.5894289",
"0.5891739",
"0.58860534",
"0.5882406",
"0.587974",
"0.58738774",
"0.5869024",
"0.58679986",
"0.5867561",
"0.5865932",
"0.5864461",
"0.58639693",
"0.58617616",
"0.5861436",
"0.5860451",
"0.58602303",
"0.5854586",
"0.58537364",
"0.5850427",
"0.5850199"
] | 0.0 | -1 |
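A minimal sketch of how a strong-parameters method like franchise_params above is typically consumed; the controller wiring is assumed and the permit list is abbreviated for illustration:

class FranchisesController < ApplicationController
  def create
    # Only whitelisted attributes reach the model; everything else is dropped.
    @franchise = Franchise.new(franchise_params)
    if @franchise.save
      redirect_to @franchise, notice: 'Franchise was successfully created.'
    else
      render :new
    end
  end

  private

  # Never trust parameters from the scary internet, only allow the white list through.
  def franchise_params
    params.require(:franchise).permit(:name, :franchise_number, :phone, :website)
  end
end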
Converts the given field to the value that the database adapter returns as a usable column name: column_alias_for("users.id") => "users_id" column_alias_for("sum(id)") => "sum_id" column_alias_for("count(distinct users.id)") => "count_distinct_users_id" column_alias_for("count(*)") => "count_all" | def column_alias_for(field)
column_alias = +field
column_alias.gsub!(/\*/, "all")
column_alias.gsub!(/\W+/, " ")
column_alias.strip!
column_alias.gsub!(/ +/, "_")
@connection.table_alias_for(column_alias)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def col_name(col)\n [@_table_name, col.name].join('.')\n end",
"def converted_field_name\n FIELD_NAMES_TABLE[@field_name] || @field_name\n end",
"def aggregated_fact_column_sql\n aggregate_fields.collect { |c| \n \"#{c.strategy_name}(#{c.from_table_name}.#{c.name}) AS #{c.label_for_table}\"\n }.join(\",\")\n end",
"def column_name\n Name.new(\"#{name}_id\")\n end",
"def normalize_column(field)\n field.is_a?(Symbol) ? {:name => field} : field\n end",
"def column_name(column)\n tmp = ''\n column.name.gsub(/_/, ' ').gsub(/id/,\"\").split.each{|word| tmp += word.capitalize + ' '}\n return tmp\n end",
"def namespaced_column(column, klass=nil)\n if klass.respond_to?(:table_name)\n ::ActiveRecord::Base.connection.quote_table_name(klass.table_name) + '.' + column.to_s\n else\n column.to_s\n end\n end",
"def encode_name(col)\n if col.name == 'id'\n :_id\n elsif col.name == 'type'\n :_type\n else\n col.name.to_sym\n end\n end",
"def column_with_prefix(column)\n if column.is_string?\n column.__name\n elsif associations[column].empty?\n \"#{@model.quoted_table_name}.#{quote_column(column.__name)}\"\n else\n associations[column].collect { |assoc|\n assoc.has_column?(column.__name) ?\n \"#{@model.connection.quote_table_name(assoc.join.aliased_table_name)}\" + \n \".#{quote_column(column.__name)}\" :\n nil\n }.compact.join(', ')\n end\n end",
"def _ field\n as_name = [name, field].join(\"_\").to_sym\n AS[\"#{relationize}.(#{field})\", as_name, Bag.new([field, field_type(field)]), nil, :skip_type]\n end",
"def polymorphic_column_name(reflection, column_name)\n # self.polymorphic_namespace_name.to_s + '.' + (reflection.options[:as] ? reflection.options[:as].to_s.classify : reflection.class_name.to_s) + '#' + column_name.to_s\n self.polymorphic_namespace_name.to_s + '#' + column_name.to_s\n end",
"def count_by column_name, field\n inc = increment_mask_for column_name, field\n only = only_mask_for column_name, field\n # Create super-special-bitfield-grouping-query w/ AREL\n sql = arel_table.\n project(\"count(#{primary_key}) as #{field}_count, (#{column_name} & #{only})/#{inc} as #{field}\").\n group(field).to_sql\n connection.send :select, sql, 'AREL' # Execute the query\n end",
"def field_to_col(field)\n dbs, fld = field.split(\"-\")\n (dbs.include?(\".\") ? \"#{dbs.split(\".\").last}.#{fld}\" : fld)\n end",
"def field_name\n return nil if virtual?\n @field_name ||= column ? quoted_field_name(column.name) : association.foreign_key\n end",
"def column_label_for(type, request, model, column)\n unless label = model.column_options_for(type, request, column)[:label]\n label = humanize(column)\n end\n label\n end",
"def get_field_column_name(args)\r\n #expected args => :unlimited, :field\r\n prefix = args[:unlimited] ? \"u_\" : \"l_\"\r\n field_id = \"_#{args[:field_id]}\"\r\n field_name = args[:field]\r\n column_name = \"#{prefix}#{field_name}\"\r\n checksum = \"_#{Digest::CRC16.hexdigest(field_name)}\"\r\n field_truncate_pos = @max_db_identifier_size - (prefix.size + checksum.size)\r\n if column_name.size > @max_db_identifier_size then\r\n \"#{prefix}#{field_name[0,field_truncate_pos]}#{checksum}\"\r\n else\r\n column_name\r\n end\r\n end",
"def get_field(field, collection)\n if field.is_a?(Hash) # rubocop:disable Style/ConditionalAssignment\n field = \"#{field[:table]}.#{field[:field]}\"\n else\n field = \"#{collection.table_name}.#{field}\"\n end\n field_base.gsub(Placeholder::FIELD, field)\n end",
"def define_field_name(field)\n (field.to_s + '_field').to_sym.tap do |field_method_name|\n field_method_names << field_method_name\n end\n end",
"def column_name_for(class_type)\n if class_type == \"User\"\n return \"task_owners.user_id\"\n elsif class_type == \"Project\"\n return \"tasks.project_id\"\n elsif class_type == \"Task\"\n return \"tasks.id\"\n elsif class_type == \"Customer\"\n return \"projects.customer_id\"\n elsif class_type == \"Company\"\n return \"tasks.company_id\"\n elsif class_type == \"Milestone\"\n return \"tasks.milestone_id\"\n elsif class_type == \"Tag\"\n return \"task_tags.tag_id\"\n else\n return \"#{ class_type.downcase }_id\"\n end\n end",
"def col(name)\n query_table[name.to_s]\n end",
"def col(name)\n query_table[name.to_s]\n end",
"def column\n \"#{table_name}.#{friendly_id_config.cache_column}\"\n end",
"def process_field(field_name)\n if ['id', 'name', 'first_name', 'node_id'].include?(field_name)\n \"#{table}.#{field_name}\"\n else\n super # raises\n end\n end",
"def caprese_unalias_field(field)\n caprese_field_aliases[field = field.to_sym] || field\n end",
"def column_name\n ensure_setup!\n column.name.to_sym\n end",
"def translation_for(field)\n object.class.human_attribute_name(field)\n end",
"def field_name\n @field.name\n end",
"def unique_name\n if @columns.length == 1\n @alias || @columns.first.__name\n else\n @alias\n end\n end",
"def column_name\n name.to_sym\n end",
"def field_human_name(field)\n I18n.translate(\"#{model_name}.attributes.#{field}\", :count => 1, :default => Inflections.humanize(field), :scope => :models)\n end",
"def column_name; end",
"def name_field\n category_ids = AttributeCategory.where(\n user_id: user_id,\n entity_type: self.class.name.downcase\n ).pluck(:id)\n\n # Todo these two queries should be able to be joined into one\n AttributeField.find_by(\n user_id: user_id,\n attribute_category_id: category_ids,\n field_type: 'name'\n )\n end",
"def field_name(field=nil)\n result = field_name_fragment\n result << \"[#{field}]\" if field\n result\n end",
"def chooseColumnAlias\n @metadata.chooseColumnAlias\n end",
"def get_indexed_name(field_name)\n column_schema = self.metadata.column_schema\n indexed_names_by_column_name = Hash[column_schema.map do |schema_entry|\n [schema_entry.name, schema_entry.indexed_name]\n end]\n indexed_names_by_column_name[field_name]\n end",
"def converted_field_name\n FIELD_NAMES_TABLE[@token.field_name] || @token.field_name\n end",
"def column_name(scope, table_alias, identifier)\n if scope.column_names.include?(identifier)\n \"(#{table_alias}.#{identifier})\"\n else\n raise Kaprella::Errors::UnknownPropertyIdentifier.new(identifier)\n end\n end",
"def autoreport_column(column)\n return if column.name == 'id'\n belongs_to_ref = klass.reflections.find { |_, a| a.foreign_key == column.name }\n if belongs_to_ref\n name, ref = belongs_to_ref\n name_col = (ref.klass.column_names & autoreport_association_name_columns(ref)).first\n if name_col\n name_expr = \"#{ref.klass.table_name}.#{name_col}\"\n category_dimension name, expression: name_expr, relation: ->(r) { r.joins(name) }\n else\n category_dimension column.name\n end\n elsif column.cast_type.type == :datetime\n time_dimension column.name\n elsif column.cast_type.number?\n number_dimension column.name\n else\n category_dimension column.name\n end\n end",
"def present_field_as_label(field)\n field.to_s.sub(/_id$/, '').split('_').map(&:capitalize).join(' ')\n end",
"def t(column)\n object.class.human_attribute_name(column.to_sym)\n end",
"def override_search_field(column)\n \"#{column.name}_search_column\"\n end",
"def project_custom_field(_table, column_name)\n # two scenarios:\n field_type = custom_field_type(column_name)\n\n # 1. this is a calculated column that can be calculated in query\n # - then we supply the arel directly here\n if field_type == :calculated\n calculated = build_custom_calculated_field(column_name)[:arel]\n # `as` is needed to name the column so it can deserialize into active model\n return calculated.as(column_name.to_s) unless calculated.nil?\n end\n\n # 2. this is a virtual column who's result will be calculated post-query in rails and we're just fetching source columns\n # - then we use query_attributes and apply transform after the fact\n return build_custom_virtual_field(column_name) if field_type == :virtual\n\n # if nil, this is not a custom field\n raise \"unknown field type #{field_type}\" unless field_type.nil?\n\n nil\n end",
"def my_column_name\n return @my_column_name if defined? @my_column_name\n if is_sq?\n @my_column_name = \"#{lime_group.sid}X#{gid}X#{parent_qid}#{title}\"\n else\n @my_column_name = \"#{lime_group.sid}X#{gid}X#{qid}\"\n end\n return @my_column_name\n end",
"def counter_cache_column_name\n @counter_cache_column_name ||= (@options[:counter_cache].is_a?(String) ||\n @options[:counter_cache].is_a?(Symbol)) ?\n @options[:counter_cache] : \"#{inverse || inverse_class_name.demodulize.underscore.pluralize}_count\"\n end",
"def caching_column_name(dimension = nil)\n name = options[:cache_column].to_s\n name += \"_#{dimension.to_s.underscore}\" unless dimension.blank?\n name\n end",
"def field_name\n self.class.name.split(\"::\").last.downcase.sub(\"of\", \"_of_\"). \\\n sub(\"field\", \"\").downcase\n end",
"def field_name(relation)\n \"__#{relation}_ids\"\n end",
"def columnName_to_fieldname (name)\n return name.downcase.gsub(' ','-')\nend",
"def local_field_to_ad field_name\n @local_to_ad_map ||= ::Devise.ad_attr_mapping[devise_model_name.to_sym]\n return (@local_to_ad_map.has_key? field_name) ? @local_to_ad_map[field_name] : field_name\n end",
"def resource_attribute_name(field, direction = :from)\n (transforms(direction)[field] || field).to_sym\n end",
"def quote_column_name(name)\n name.to_s\n end",
"def user_column\n IdMethods::USER_COLUMN\n end",
"def get_column_string(name, type)\n case type\n when 'primary_key' then \"primary_key :#{name}\"\n when 'integer' then \"Integer :#{name}\"\n when 'string' then \"String :#{name}\"\n when 'text' then \"String :#{name}, text: true\"\n when 'boolean' then \"TrueClass :#{name}\"\n when 'float' then \"Float :#{name}\"\n when 'date' then \"Date :#{name}\"\n when 'time' then \"Time :#{name}\"\n when 'references' then \"foreign_key :#{name.foreign_key}, :#{name.tableize}\"\n when 'references_singleton' then \"foreign_key :#{name.foreign_key}, :#{name.underscore}\"\n end\n end",
"def user_column\n end",
"def column_for(col_name)\n relation.column_names.detect { |col| col == col_name }\n end",
"def value_for(field)\n value = instance_variable_get(:\"@#{field}\")\n value.nil? ? nil : value.to_s\n end",
"def index_field_label(document, field)\n lookup_display_name(document, field) || super\n end",
"def arel_field\n case param.field.options[:transmogrification]\n when :timestamp_to_hours\n Arel::Nodes::NamedFunction.new('TIMESTAMPDIFF', [Arel::Nodes::SqlLiteral.new('HOUR'), arel_table[field], Arel::Nodes::SqlLiteral.new('UTC_TIMESTAMP()')])\n when :timestamp_to_days\n Arel::Nodes::NamedFunction.new('TIMESTAMPDIFF', [Arel::Nodes::SqlLiteral.new('DAY'), arel_table[field], Arel::Nodes::SqlLiteral.new('UTC_TIMESTAMP()')])\n when :sum\n Arel::Nodes::NamedFunction.new('SUM', [arel_table[field]])\n when :upper\n Arel::Nodes::NamedFunction.new('UPPER', [arel_table[field]])\n when :lower\n Arel::Nodes::NamedFunction.new('LOWER', [arel_table[field]])\n else\n arel_table[field]\n end\n end",
"def column_label(column, default = nil)\n return '' if column.blank?\n defaults = ['reportings.{{model}}.{{column}}', 'models.attributes.{{model}}.{{column}}'].collect do |scope|\n scope.gsub!('{{model}}', self.class.name.underscore.gsub('/', '.'))\n scope.gsub('{{column}}', column.to_s)\n end.collect(&:to_sym)\n defaults << column.to_s.humanize\n I18n.t(defaults.shift, :default => defaults)\n end",
"def attribute_field_name(attr)\n attr.to_s.camelize(:lower)\n end",
"def to_sym\n \"#{column}.#{method}\".to_sym\n end",
"def to_sym\n \"#{column}.#{method}\".to_sym\n end",
"def t_field(field_label = nil, obj = '')\n return '' if field_label.blank?\n\n case obj\n when Class\n I18n.t(field_label, scope: \"activerecord.attributes.#{obj.class}\", default: field_label).try(:capitalize)\n when String\n I18n.t(field_label, scope: \"activerecord.attributes.#{obj}\", default: field_label).try(:capitalize)\n else\n I18n.t(field_label, default: field_label).try(:capitalize)\n end\n end",
"def type_method_from_database(attribute)\n type = type_from_database(attribute)\n\n unless type.nil?\n \"#{type}_field\"\n else\n nil\n end\n end",
"def to_field_name(name)\n # camelize strips leading underscores, which is undesirable.\n if name.to_s.starts_with?('_')\n \"_#{to_field_name(name.to_s[1..-1])}\"\n elsif Rails.config.camel_case\n name.to_s.camelize(:lower)\n else\n name.to_s\n end\n end",
"def field\n @field ||= quoted_field(field_name)\n end",
"def qualified_column_name(column, table)\n if Symbol === column \n c_table, column, c_alias = split_symbol(column)\n schema, table, t_alias = split_symbol(table) if Symbol === table\n c_table ||= t_alias || table\n ::Sequel::SQL::QualifiedIdentifier.new(c_table, column)\n else\n column\n end\n end",
"def counter_cache_name_for(obj, cache_name_finder)\n # figure out what the column name is\n if cache_name_finder.is_a? Proc\n # dynamic column name -- call the Proc\n cache_name_finder.call(obj)\n else\n # static column name\n cache_name_finder\n end\n end",
"def column_aliases(node)\n @name_and_alias_cache[node]\n end",
"def get_filter_name(field, model = nil)\n field_name = model.present? ? \"#{model.table_name}.#{field}\" : field\n \"#{name}[#{filter_param_name}][#{field_name}]\"\n end",
"def column_as(col) #nodoc\n _col = column(col)\n _column_hashes.values.find{ |c| c.as == _col.name }\n end",
"def grouped_column_names_for(object)\n object.column_names.map { |column| \"#{object.table_name}.#{column}\" }.join(', ')\n end",
"def field_name(ind)\n IBM_DB.field_name(@stmt, ind)\n end",
"def price_field\n \"#{field_name.parameterize.gsub('-', '_')}_value_as_integer\"\n end",
"def caching_column_name(dimension = nil)\n self.class.caching_column_name(dimension)\n end",
"def ad_field_to_local field_name\n @ad_to_local_map ||= ::Devise.ad_attr_mapping[devise_model_name.to_sym].invert\n return (@ad_to_local_map.has_key? field_name) ? @ad_to_local_map[field_name] : field_name\n end",
"def table_alias\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.alias\n end\n end",
"def contact_field_name(contact_field)\n contact_field_translations[contact_field]\n end",
"def field_label(field)\n @__field_label_cache ||= {}\n @__field_label_cache[field] ||= field.to_s.sub(/_facet$|_display$|_[a-z]$/,'').gsub(/_/,' ')\n @__field_label_cache[field]\n end",
"def dig_column(klass, name_or_segments)\n segments = name_or_segments.is_a?(Array) ? name_or_segments : name_or_segments.to_s.split(\".\")\n column_name = segments.shift\n if( segments.empty? )\n return klass.columns_hash[column_name]\n else\n reflection = klass.reflect_on_association(column_name.to_sym)\n raise \"Can't find reflection for: #{klass}.#{column_name}\" unless reflection\n raise \"Can't fetch column from has_many association: #{klass}.#{column_name}\" if reflection.macro == :has_many\n return dig_column(reflection.klass, segments)\n end\n end",
"def alias_column(pretty, original)\n self.column_aliases[pretty] = original\n end",
"def quote_column_name(name) #:nodoc:\r\n %Q(\"#{name}\")\r\n end",
"def column_for_attribute(name)\n name = name.to_sym\n return self.faux_columns[name] if self.faux_columns.has_key?(name)\n super\n end",
"def columns_for_distinct(columns, orders) #:nodoc:\n order_columns = orders.reject(&:blank?).map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(?:ASC|DESC)\\b/i, '')\n .gsub(/\\s+NULLS\\s+(?:FIRST|LAST)\\b/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n [super, *order_columns].join(', ')\n end",
"def field\n @field ||= @options.fetch(:field, repository.adapter.field_naming_convention.call(name))\n end",
"def def_column_alias(meth, column)\n clear_setter_methods_cache\n overridable_methods_module.module_eval do\n define_method(meth){self[column]}\n define_method(\"#{meth}=\"){|v| self[column] = v}\n end\n end",
"def def_column_alias(meth, column)\n clear_setter_methods_cache\n overridable_methods_module.module_eval do\n define_method(meth){self[column]}\n define_method(\"#{meth}=\"){|v| self[column] = v}\n end\n end",
"def format_audited_field(field)\n return auditable.additional_question.name if auditable_type.eql?('AdditionalAnswer') && field.eql?('answer')\n\n field\n end",
"def get_variable(row, field)\n val = row.instance_variable_get(\"@#{field}\")\n val.nil? ? '' : val\n end",
"def for_field(field)\n @all.select { |f| f.field == field.to_sym }\n end",
"def get_name_column(options) #:nodoc:\n if options.has_key?(:name_column) && !options[:name_column].blank? then\n options[:name_column].to_s.to_sym\n else\n :name\n end\n end",
"def sort_field\n @view_column.fetch(:sort_field, field)\n end",
"def to_sym\n @_to_sym ||=\n if qualified? && aliased?\n :\"#{table_name}__#{name}___#{meta[:alias]}\"\n elsif qualified?\n :\"#{table_name}__#{name}\"\n elsif aliased?\n :\"#{name}___#{meta[:alias]}\"\n else\n name\n end\n end",
"def join_alias(join)\r\n table_name = join.model_class.table_name\r\n new_alias = table_name\r\n if @join_aliases[table_name]\r\n new_alias = \"#{pluralize(join.reflection)}_#{join.parent.model_class.table_name}\"\r\n if @join_aliases[table_name].include? new_alias\r\n new_alias += '1'\r\n while @join_aliases[table_name].include? new_alias\r\n new_alias = new_alias.succ\r\n end\r\n end\r\n end\r\n (@join_aliases[table_name] ||= []) << new_alias\r\n return new_alias\r\n end",
"def columns_for_distinct(columns, orders)\n # Lifted from the default Postgres implementation\n order_columns = orders.map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(ASC|DESC)\\s*(NULLS\\s+(FIRST|LAST)\\s*)?/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n [super, *order_columns].join(', ')\n end",
"def to_select_sql\n clause = @columns.collect { |column|\n column_with_prefix(column)\n }.join(', ')\n \n clause = adapter.concatenate(clause) if concat_ws?\n clause = adapter.group_concatenate(clause) if is_many?\n \n \"#{adapter.cast_to_string clause } AS #{quote_column(unique_name)}\"\n end",
"def column_name(index)\n \"Column #{index}\"\n end",
"def call_field(field)\n field.to_s.split('__').inject(self) { |a, e| a&.send(e) }\n rescue NoMethodError\n ''\n end",
"def human_attribute_name(field_name, options={})\n \"\"\n end",
"def column_for_field_index(index)\n column(column_index_for_field_index(index))\n end"
] | [
"0.5895517",
"0.58782315",
"0.58514905",
"0.5712649",
"0.56917036",
"0.5688646",
"0.56815845",
"0.56802845",
"0.56715626",
"0.5626864",
"0.56251174",
"0.56140554",
"0.5603858",
"0.5601835",
"0.5581238",
"0.5534465",
"0.5524514",
"0.5519214",
"0.54917383",
"0.5488859",
"0.5488859",
"0.54255515",
"0.5418042",
"0.54025453",
"0.53976935",
"0.5380649",
"0.535876",
"0.5353694",
"0.53316104",
"0.5329445",
"0.5325294",
"0.5284385",
"0.5278921",
"0.523738",
"0.52315086",
"0.5218685",
"0.521669",
"0.52113575",
"0.51948553",
"0.5167879",
"0.51649374",
"0.5161941",
"0.5158416",
"0.51156837",
"0.5112465",
"0.51037586",
"0.5098414",
"0.5096439",
"0.50847065",
"0.50800616",
"0.5079119",
"0.50746",
"0.50653815",
"0.50476676",
"0.5038286",
"0.5037675",
"0.5033874",
"0.5033786",
"0.5019804",
"0.50146884",
"0.5010725",
"0.5010725",
"0.49936664",
"0.49797294",
"0.49780968",
"0.49760297",
"0.4974455",
"0.49631095",
"0.49557188",
"0.49556267",
"0.4951405",
"0.4949882",
"0.49389687",
"0.49357653",
"0.4933137",
"0.49305514",
"0.49227422",
"0.49212012",
"0.49080542",
"0.4906689",
"0.4900581",
"0.48958853",
"0.48820597",
"0.48726526",
"0.4871864",
"0.48673192",
"0.48673192",
"0.48625782",
"0.48465654",
"0.48411673",
"0.48285902",
"0.48274428",
"0.48189825",
"0.48168173",
"0.48148236",
"0.4809518",
"0.48083863",
"0.480511",
"0.47937348",
"0.478416"
] | 0.7471608 | 0 |
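Tracing one input through the column_alias_for steps in the record above (a sketch; the final call to the adapter's table_alias_for is effectively a no-op for short alias names like these):

field = +"count(distinct users.id)"
field.gsub!(/\*/, "all")   # no-op here; "*" would become "all", which is why "count(*)" yields "count_all"
field.gsub!(/\W+/, " ")    # => "count distinct users id "
field.strip!               # => "count distinct users id"
field.gsub!(/ +/, "_")     # => "count_distinct_users_id"
# @connection.table_alias_for then returns the usable column alias.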
Same as count but performs the query asynchronously and returns an ActiveRecord::Promise | def async_count(column_name = nil)
async.count(column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def count\n @count ||=\n begin\n # self.sql sets self.model_class.\n this_sql = sql(:count => true)\n model_class.connection.\n query(this_sql).\n first.first\n end\n end",
"def count\n query.count\n end",
"def count\n query.count\n end",
"def count\n @count ||= @query.count\n end",
"def get_count (db, query)\n\tcount = db.execute query\n\treturn count[0][0]\nend",
"def comments_per_recipe\n db_connection do |conn|\n conn.exec(\"SELECT recipe_id, COUNT(*) FROM comments GROUP BY recipe_id\").to_a\n end\nend",
"def count\n ensure_aggregate!\n SQLite::API.aggregate_count( @func )\n end",
"def count\n load\n @result_count\n end",
"def count_by_sql(sql)\n count = connection.select_one(sql, \"#{name} Count\").values.first\n return count ? count.to_i : 0\n end",
"def count_sql_queries_to_load\n result = 0\n\n ActiveSupport::Notifications.subscribe('sql.active_record') do |_name, _start, _finish, _id, payload|\n result += 1\n end\n\n yield\n\n result\n end",
"def count **args\n\t\tquery( **( { projection: 'COUNT(*)' }.merge args )).execute(reduce: true){|x| x[:\"COUNT(*)\"]}\n end",
"def db_count_queries &block\n count = 0\n\n counter_f = ->(name, started, finished, unique_id, payload) {\n unless %w[ CACHE SCHEMA ].include? payload[:name]\n count += 1\n end\n }\n ActiveSupport::Notifications.subscribed(counter_f, \"sql.active_record\", &block)\n count\n end",
"def retrieved_records\n results.count\n end",
"def total_count\n @total_count ||= self.query.count\n end",
"def db_query_transform__count query\n tmp_table = \"resultset_table\"\n make_tmp_table = db_query_transform__subquery query, tmp_table\n \"SELECT COUNT(*) FROM #{make_tmp_table}\"\n end",
"def execute_sql\n # add conditions including the cache_ids and retrieve a count and all of the records\n return @model.find(:all,to_active_record)\n end",
"def t_counts (conn, log, t)\n log.d(\"Checking counts.\");\n q = \"SELECT COUNT(*) AS c FROM #{t}\";\n log.d(q);\n conn.query(q) do |r|\n log.d(r[:c]);\n end\nend",
"def count\n sql = order_by(nil).to_sql(pagination: false)\n\n @connection.ask(\"SELECT COUNT(*) FROM (#{sql}) _total_count\", *args)\n end",
"def count\n @options[:select] = \"COUNT\"\n @options.delete(:attributes_to_get)\n\n response = run\n\n while continue?(response)\n @options[:exclusive_start_key] = response.last_evaluated_key\n response = run(response)\n end\n\n response.count\n end",
"def getrowcount\n @rowcount = @db.execute(\"select count(*) from #{@tablename}\").to_s\nend",
"def count **args\n logger.progname = 'RestOperations#CountRecords'\n query = OrientSupport::OrientQuery.new args\n query.projection << 'COUNT (*)'\n result = get_records raw: true, query: query\n result.first['COUNT'] rescue 0 # return_value\n end",
"def count\n call_client(:count)\n end",
"def query_for_data\n if self.record_klass < ActiveRecord::Base\n self.dataset = self.record_klass.find(:all, self.query)\n \n count_query = self.query.reject do |k, v| \n [:limit, :offset, :order].include?(k.to_sym )\n end\n self.resultcount = self.record_klass.count(:all, count_query)\n \n elsif self.record_klass < ActiveResource::Base\n self.dataset = self.record_klass.find(:all, :params => self.query)\n self.resultcount = self.dataset.delete_at(self.dataset.length - 1).total\n else\n raise \"Unable to query for data. Supported base classes are 'ActiveRecord::Base' and 'ActiveResource::Base' but '#{self.record_klass}' was given\"\n end\n \n self.resultcount = self.resultcount.length if self.resultcount.respond_to?(:length)\n end",
"def count(query={})\n collection.count({ query: query })\n end",
"def count\n self.all.count\n end",
"def count(query)\n read(query).length\n end",
"def count ; @count ||= table.count end",
"def count(query)\n raise \"The class #{self.class.name} must implement the 'count' method.\"\n end",
"def count(selector={})\n @db.count(@name, selector || {})\n end",
"def count_records **args\n logger.progname = 'RestOperations#CountRecords'\n query = OrientSupport::OrientQuery.new args\n \tquery.projection << 'COUNT (*)'\n \tresult = get_records raw: true, query: query\n begin\n result.first['COUNT']\n rescue\n return 0\n end\n end",
"def stock_count\n sql = \"SELECT * FROM cars\n WHERE shop_id = $1\"\n value = [@id]\n cars = SqlRunner.run(sql, value)\n result_array = cars.map{|car| Car.new(car)}\n count = result_array.count\n return count\n end",
"def count(layer_idx=0, options={})\n return nil unless self.metadata[\"capabilities\"] =~ /Query/\n query(layer_idx, options.merge(:returnCountOnly => true))\n end",
"def count\n all.count\n end",
"def count(view = :all, *args, &block)\n if view == :all\n return super({}, *args) \n end\n \n if has_view?(view)\n query = args.shift || {}\n result = view(view, {:reduce => true}.merge(query), *args, &block)['rows']\n \n return result.first['value'] unless result.empty?\n end\n 0\n end",
"def result_count\n result_ids.size\n end",
"def contact_count\n Contact.visible.includes(:address).count(:conditions => statement)\n rescue ::ActiveRecord::StatementInvalid => e\n raise StatementInvalid.new(e.message)\n end",
"def query\n resp = {:records => []}\n status_key = STATUS[rand(STATUS.length)]\n ss = Spooge.find_on_redis(:status,status_key)\n resp[:record_count] = ss.length\n ss.each do |s|\n resp[:records] << s\n end \n render :json => resp\n end",
"def fetch_count\n if refresh_cache?\n execute_count\n elsif cached.is_a?(AridCache::CacheProxy::Result)\n cached.has_count? ? cached.count : execute_count\n elsif cached.is_a?(Fixnum)\n cached\n elsif cached.respond_to?(:count)\n cached.count\n else\n cached # what else can we do? return it\n end\n end",
"def count\n @count\n end",
"def count\n @count\n end",
"def count\n @count\n end",
"def customer_count()\n customers().count()\nend",
"def total_count\n return option_for(:count) if option_for(:count)\n\n c = except(:offset, :limit, :order)\n c = c.reorder(nil)\n c = c.count\n c.respond_to?(:count) ? c.count : c\n rescue ActiveRecord::StatementInvalid => e\n if e.to_s =~ /statement timeout/\n 1_000_000\n else\n raise\n end\n end",
"def count_bills\n sql = <<-SQL\n SELECT count(*) FROM (SELECT DISTINCT guest_name FROM guests WHERE guest_name LIKE \"Bill %\");\n SQL\n DB[:conn].execute(sql)[0][0]\nend",
"def item_count()\n sql = \"SELECT COUNT(id) AS num FROM items WHERE c_class_id = $1\"\n values=[id]\n return SqlRunner.run(sql,values).first['num'].to_i\n end",
"def count\n @count\n end",
"def count\n @rows.count\n end",
"def size\n\n fetch_all(:count => true)\n end",
"def execute_batch_counting(projects_relation)\n end",
"def records_total_count\n Rails.cache.fetch('raw_count') { get_raw_records.count(:all) }\n end",
"def count\n @count\n end",
"def count\n @count\n end",
"def count\n @count\n end",
"def count_people(query={})\n self.simple_client.get(\"/api/v1/people/count?#{query.to_query}\")[\"count\"]\n end",
"def users_count\n sql = 'SELECT COUNT(*)\n FROM users u\n WHERE EXISTS (SELECT 1\n FROM subscriptions s\n WHERE s.user_id = u.id);\n '\n result = ActiveRecord::Base.connection.execute sql\n result.first['count'].to_i\n end",
"def customer_count\n sql = \"SELECT c.* FROM customers c INNER JOIN tickets t ON t.customer_id = c.id WHERE t.film_id = #{@id};\"\n customers = SqlRunner.run(sql)\n return customers.count\nend",
"def count(extras = false)\n @count ||= klass.collection.find(selector, process_options).count(extras)\n end",
"def count\n raise \"View#count cannot be used with group options\" if query[:group]\n if can_reduce?\n row = reduce.skip(0).limit(1).rows.first\n row.nil? ? 0 : row.value\n else\n limit(0).total_rows\n end\n end",
"def count\n Dynamoid.adapter.count(table_name)\n end",
"def count\n Dynamoid.adapter.count(table_name)\n end",
"def getCrawlerCount\n db.execute(\"SELECT count(*) FROM #{@crawlerTable}\")[0][0]\n end",
"def pull_count\n base_scope.count\n rescue ::ActiveRecord::StatementInvalid => e\n raise StatementInvalid.new(e.message)\n end",
"def count_estimate\n estimate = estimated_count\n return estimate if estimate > EXACT_COUNT_THRESHOLD\n\n sql = order_by(nil).to_sql(pagination: false)\n @connection.ask(\"SELECT COUNT(*) FROM (#{sql}) _total_count\", *args)\n end",
"def api_count(args)\n query_str = args[:query]\n object_name = args[:entity].pluralize\n my_response = HTTParty.get(\"https://api.rechargeapps.com/#{object_name}/count?#{query_str}\",\n headers: HEADER)\n my_count = my_response['count'].to_i\n Resque.logger.info \"#{my_count} #{object_name}'s on Recharge API\"\n Resque.logger.info my_response\n my_count\n end",
"def message_count\n id = session[:userid]\n # sent_by_user = Status.filter(:owner_id => id).exclude(:recipient_id => nil).count # 1)\n # sent_to_user = Status.filter(:recipient_id => id).count\n # sent_by_user + sent_to_user\n \n # Using a single database query \n # http://sequel.rubyforge.org/rdoc/files/doc/dataset_filtering_rdoc.html => \"Using OR instead of AND\"\n Status.filter(:owner_id => id).exclude(:recipient_id => nil). # 2)\n or(:recipient_id => id).\n count\n end",
"def count\n @collection.count(Scope.new(@collection, @selector, @opts))\n end",
"def count\n transactions.count\n end",
"def call\n res = client.get('/api/rest/v1/users.json?action=count')\n res['data']['count']\n end",
"def count\n return @@all\n end",
"def count(*args)\n with_scope(:find => { :conditions => \"deleted_at is NULL\" }) { super }\n end",
"def total_results\n records&.size || 0\n end",
"def count\n Driver.client[coll_name].find.count\n end",
"def query_7\n document_ids = Perpetuity[Document].select {|document| document.id}.to_a\n return document_ids.size\nend",
"def query_count(query, idx, type = 'document')\n response = request(\n :count,\n index: idx,\n type: type,\n body: query)\n response['count'].to_i || 0\n end",
"def count(constraints = {})\n query(constraints).count\n end",
"def count(where_pattern)\n client.query(\"#{prefixes} SELECT (COUNT(distinct(?resource)) as ?count) FROM <#{graph}> WHERE { #{where_pattern} }\").first[\"count\"].value.to_i\n end",
"def count_all_entries(database)\n sql = <<-SQL\n SELECT hero_seen, COUNT(*) c FROM reports GROUP BY hero_seen HAVING c > 1\n SQL\n p database.execute(sql)\nend",
"def count\n resource = @client.send(:head, self.next, **@options)\n resource.get_response.total_records\n end",
"def count\n @mutex.synchronize do\n count_nonsynchronize\n end\n end",
"def count\n end",
"def count\n end",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def records_total_count\n fetch_records.unscope(:group, :select).count(:all)\n end",
"def meeting_count\n Meeting.visible.count(:include => [:project], :conditions => statement)\n rescue ::ActiveRecord::StatementInvalid => e\n raise StatementInvalid.new(e.message)\n end",
"def get_completed_count\n @list = User.joins(:todo_items).where(\"todo_items.completed = ?\", true)\n return @list.count\nend",
"def count( query )\n data = index_request({ \"q\" => query })\n return data[\"response\"][\"numFound\"]\n end",
"def num_results(_args = {})\n @num_results ||= result_ids&.count || 0\n end",
"def async_result()\n #This is a stub, used for indexing\n end",
"def count(opts = {})\n view = opts[:view] || @opts[:view]\n client.view(view,\n limit: 0,\n include_docs: false,\n stream: true).total_rows\n end",
"def query_len; query.size; end",
"def count\n @params[:rettype] = 'count'\n self\n end",
"def total_count(queue)\n collection.find(conditions(queue)).count\n end",
"def query_1\n document_ids = Perpetuity[Document].all.to_a.map(&:id)\n index_num = [ 42, 76, 44, 90, 8, 12, 4, 77, 43, 99]\n all_ids = []\n index_num.each do |num|\n all_ids << document_ids[num]\n end\n\n processed_number = 0\n all_ids.each do |id|\n Perpetuity[Document].find(id)\n processed_number+=1\n end\n return processed_number\nend",
"def count\n filtered = apply_criteria(@data)\n filtered.count\n end",
"def counts_number_of_bears_with_goofy_temperaments\n \"Write your SQL query here\"\n \"SELECT count(*) FROM bears WHERE temperament = 'goofy' \";\nend",
"def count\n collection.count\n end",
"def cargar_resultados\n preguntas.each do |pregunta|\n pregunta.respuestas.each do |respuesta|\n # resultados = ResultadoPregunta.where(recorrido: recorrido.id, pregunta: pregunta, respuesta: respuesta).count\n sql = \"select count(1)\n from resultado_preguntas r_preg join resultado_recorridos r_rec on r_preg.resultado_recorrido_id = r_rec.id join\n recorridos recorrido on r_rec.recorrido_id = recorrido.id\n where recorrido.id = #{recorrido.id} and r_preg.pregunta_id = #{pregunta.id} and r_preg.respuesta_id = #{respuesta.id}\"\n respuesta.cantidad_elegida = ActiveRecord::Base.connection.execute(sql)[0][\"count\"].to_i\n # respuesta.cantidad_elegida = resultados\n end\n end\n end",
"def test_find_count\n assert_equal(6, @coll.find().limit(2).count())\n end",
"def approx_count\n return count unless connection.respond_to?(:approx_count)\n a_count = connection.approx_count(self.table_name)\n return a_count unless a_count\n if a_count < 20000\n\tcount\n else\n\ta_count\n end\n end",
"def count\n Project.count\n end"
] | [
"0.6552634",
"0.6464964",
"0.6464964",
"0.63392395",
"0.61851805",
"0.590122",
"0.58874714",
"0.5857866",
"0.58360636",
"0.58178264",
"0.5779527",
"0.57781446",
"0.5759162",
"0.5729152",
"0.5725027",
"0.5710067",
"0.5634603",
"0.56287456",
"0.5612585",
"0.55990654",
"0.55801636",
"0.5573985",
"0.5540969",
"0.5465859",
"0.54333615",
"0.54322904",
"0.54213357",
"0.5418901",
"0.5412796",
"0.54012734",
"0.5392909",
"0.53874403",
"0.5344866",
"0.53374225",
"0.53250045",
"0.5313003",
"0.53128487",
"0.5312012",
"0.5311454",
"0.5311454",
"0.5311454",
"0.53019166",
"0.52975106",
"0.5274697",
"0.5268101",
"0.5266203",
"0.5264757",
"0.52599806",
"0.52593225",
"0.525468",
"0.52541023",
"0.52541023",
"0.52541023",
"0.5251282",
"0.5248231",
"0.52422166",
"0.52343655",
"0.5230816",
"0.52209485",
"0.52209485",
"0.521642",
"0.521514",
"0.5213738",
"0.519655",
"0.51917154",
"0.51897264",
"0.51891124",
"0.51885635",
"0.51747966",
"0.51731426",
"0.5172654",
"0.5169161",
"0.5167701",
"0.5155475",
"0.5151519",
"0.5145299",
"0.51430285",
"0.51386184",
"0.5138055",
"0.5124854",
"0.5124854",
"0.5118231",
"0.51024437",
"0.50997764",
"0.50974095",
"0.5084214",
"0.50752246",
"0.507405",
"0.50732577",
"0.5064979",
"0.5062995",
"0.5058372",
"0.5057314",
"0.50571626",
"0.5054047",
"0.5050176",
"0.50491637",
"0.50464123",
"0.5043244",
"0.50361353"
] | 0.7223257 | 0 |
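A minimal Ruby usage sketch of the async_count API documented above, assuming a hypothetical Person model inside a Rails 7.1+ application with an async query executor configured (the model, scope, and variable names are illustrative and not taken from this dataset row):

  # async_count returns an ActiveRecord::Promise immediately; the COUNT query
  # runs on the background pool instead of blocking the current thread.
  promise = Person.where(active: true).async_count

  # ... do unrelated work while the database executes the query ...

  # Promise#value blocks only until the result is available, then returns it
  # exactly as the synchronous count would (an Integer here).
  active_people = promise.value

If no async query executor is configured, the promise still resolves; the query is simply executed in the foreground.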
Calculates the average value on a given column. Returns +nil+ if there's no row. See calculate for examples with options. Person.average(:age) => 35.8 | def average(column_name)
calculate(:average, column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def average(column)\n if s = sum(column)\n s / _all_with_present_column(column).count.to_f\n end\n end",
"def column_average(input_query, key)\n output = input_query.average(key)\n number_with_precision(output, precision: 1)\n end",
"def avg(field)\n total = sum(field)\n total ? (total / count) : nil\n end",
"def avg(heading)\n sum(heading)/rows.size.to_f\n end",
"def average\n if self.length > 0\n return self.sum / (self.length * 1.0)\n else\n return nil\n end\n end",
"def average_student_gpa\n \"SELECT AVG(gpa) from students;\"\nend",
"def average(attribute_name, options = {})\n calculate(:avg, attribute_name, options)\n end",
"def average\n\t\tif self.length > 0\n\t\t\t#\tsum defined in activesupport/lib/active_support/core_ext/enumerable.rb\n\t\t\tself.digitize.sum.to_f / self.length\n\t\telse\n\t\t\tnil\n\t\tend\n\tend",
"def avg(field)\n aggregates(field)[\"avg\"]\n end",
"def calculate_average\n return 0 unless reviews.size.positive?\n avg = self.reviews.average(:rating) #.to_f.round(2) # * 100\n update_column(:rating, avg)\n end",
"def async_average(column_name)\n async.average(column_name)\n end",
"def get_average\n @average ||= calculate_average\n end",
"def average\n @data['average']\n end",
"def rate_average(cached = true, dimension = nil)\n avg = if cached && self.class.caching_average?(dimension)\n send(caching_column_name(dimension)).to_f\n else\n self.rates_sum(dimension).to_f / self.total_rates(dimension).to_f\n end\n avg.nan? ? 0.0 : avg\n end",
"def mean\n if self.length == 0\n nil\n else\n self.total_sum / self.length\n end\n end",
"def calc_average!\n update!(avg_rating: ratings.average(:rating))\n end",
"def average\n return nil if empty?\n sum = n = 0\n if block_given?\n each { |x| n += 1; sum += (yield x) }\n else\n each { |x| n += 1; sum += x }\n end\n sum.to_f / n\n end",
"def avg_rating(attribute)\n\t\tif reviews.none?\n\t\t val = 0\n\t\telse \n\t\t val = reviews.average(attribute)\n\t\tend\n\t\treturn val\n\tend",
"def avg(options = {})\n c = count(options)\n return nil if c.zero? # if the array is empty will be returned nil\n sum(options) / c\n end",
"def odb_avg\n \"avg(#{to_s})\"\n end",
"def average_score\n grades.average(:score) || 0\n end",
"def find_average \n result = array.sum(0.0)/array.size\n return result\n end",
"def average (anArray)\n\n\t#if the array is empty...\n\tif anArray.empty?\n\t\treturn 0.0\n\n\t# otherwise, return the sum divided by the number of values\n\telse\n\t\treturn sum(anArray) / anArray.size\n\tend\nend",
"def average_rating\n ratings.present? ? (ratings.map(&:score).sum.to_f) / ratings.count : nil\n end",
"def mean(arr = nil)\n arr ||= scores\n return (arr.reduce(0) { |sum, item| sum + item }.to_f / arr.count) unless arr.empty?\n nil\n end",
"def mean(attr)\n n = count(attr)\n if n.zero?\n 0\n else\n sum(attr) / n\n end\n end",
"def mean\n variance =0\n count = 0.0\n @data.each do |line|\n true_rating = line[2]\n guess_rating = line[3]\n count = count + 1\n variance = (true_rating.to_i - guess_rating.to_i).abs + variance\n end\n return (variance/count.to_f)\n end",
"def avg_rating\n @avg = self.ratings.average(:rating) \n @avg ? @avg : 0\n end",
"def averageif( find_header, avg_header )\n return to_enum( :sumif ) unless block_given?\n find_col, avg_col = ch( find_header ), ch( avg_header )\n sum = find_col.each_cell_wh.inject([0,0]) do |sum,ce|\n if yield( ce.value )\n sum[0] += avg_col[ ce.row ]\n sum[1] += 1\n sum \n else\n sum\n end\n end\n sum.first.to_f / sum.last\n end",
"def average\n @array.inject(0.0) {|total, n| total + n} / @array.size\n end",
"def average_rating\n ratings.average(:value).to_f\n end",
"def average()\n if(@countN > 0) then\n return @sum / @countN ;\n else\n return 0.0 ;\n end\n end",
"def average_rating\n if ratings.where(\"score > 0\").size != 0\n\t\tratings.where(\"score > 0\").sum(:score) / ratings.where(\"score > 0\").size\n\telse\n\t\t0\n\tend\n end",
"def average (anArray)\n if (anArray.length <= 0)\n return 0.0;\n else\n return sum(anArray) / anArray.length;\n end\nend",
"def average_player_age\n players.average(:age)\n end",
"def average(anArray)\n if anArray.empty? then\n return 0.0\n else\n return sum(anArray) / anArray.size\n end \nend",
"def average (array)\n if array.empty? then\n 0\n else\n (sum(array)/array.size)\n end\nend",
"def average\n return @@average_times.inject(:+).to_f / @@average_times.size\n end",
"def avg \n\t\t# prevent divide by zero\n\t\tif (self[:at_bats] != 0)\n\t\t\t(self[:hits].to_i / self[:at_bats].to_f).round(3)\n\t\telse\n\t\t\t0\n\t\tend\n\tend",
"def getAverage(arg1 , arg2 , arg3, arg4)\n return (arg1+arg2+arg3+arg4)/4.0\n end",
"def average\n @grades.reduce(0,:+) / @grades.size.to_f\n end",
"def mean()\n\t\taverage = 0.0\n\t\t@results.each do |row|\n\t\t\taverage += (row[\"rating\"].to_f - row[\"predicted\"]).abs\n\t\tend\n\n\t\treturn average/results.length\n\tend",
"def mean\n get_mean\n end",
"def mean\n Statistics.mean @data\n end",
"def calc_mean\n $mean = Emoneyoutstanding.average(:amount, :conditions => \"date > '#{Date.current - 180}'\")\n return $mean\n end",
"def avg_score_vote\n sql=<<-SQL\n select\n sum(\n case action\n when 1 then 0\n when 2 then 0\n when 3 then 1\n when 4 then 1\n when 5 then 1\n end\n ) / count(*) as sant\n from votes\n SQL\n (ActiveRecord::Base.connection.execute(sql).fetch_row[0].to_f * 100).round\n end",
"def cal_mean\n sum = @data.inject(0) do |accu, hash|\n accu + hash[:prediction] - hash[:rating]\n end\n sum.to_f / @data.size\n end",
"def average\n if self.critics.size>0\n begin\n self.critics.map{|i| i.score.to_f}.inject{ |sum, el| sum + el }.to_f / self.critics.size.to_f\n rescue\n nil\n end\n end\n end",
"def average(array)\n if !array.is_a?(Array)\n return nil\n else \n sum = 0\n array.each do |i|\n sum += i\n end\n return sum / (array.length * 1.0)\n end\nend",
"def average_age\n @ages.sum.to_f / ages.count\n end",
"def average(anArray)\n\tif anArray.size > 0 then\n\t\treturn sum(anArray) / anArray.size\n\telse\n\t\treturn 0.0\n\tend\nend",
"def average_rating\n (ratings.sum(:rating).to_f / num_ratings).round(1)\nend",
"def avg_rating(user_id)\n @user_index[user_id].col(:rating).mean\n end",
"def mean\n @sum / @count\n end",
"def average(array)\n if array.size <= 0\n return 0.0\n end\n return sum(array) / array.size\nend",
"def mean\n @mean\n end",
"def average\n\t\treturn self.sum / self.length.to_f\n\tend",
"def calc_mean(ary)\n if !ary.is_a?(Array)\n 0\n elsif ary.empty?\n 0\n else\n # Your code goes here \n\n #add the numbers \n #divide by the amount of numbers in array\n temp = 0\n ary.each do |index|\n temp = temp + index\n\n end\n\n mean = temp / ary.size\n\n return mean\n end\nend",
"def average_for_attribute(\n attribute, include_domestic_consumption = true\n )\n attribute_type = attribute.class.name.demodulize.downcase\n value_table = \"node_#{attribute_type}s\"\n # rubocop:disable Layout/LineLength\n query = basic_query(attribute, include_domestic_consumption).\n select(\n \"nodes.id AS node_id\",\n \"AVG(#{value_table}.value) OVER (PARTITION BY #{value_table}.year) AS value\",\n \"#{value_table}.year\"\n )\n # rubocop:enable Layout/LineLength\n\n Node.from(\"(\" + query.to_sql + \") s\").\n select(\"s.*\").\n where(\"s.node_id\" => @node.id).\n order(nil)\n end",
"def average\n check_numeric_array!\n a = numerify\n a.sum / length.to_f\n end",
"def average(array)\n return 0 if array.empty?\n array.sum / array.size\nend",
"def average_rating\n comments.average(:rating).to_f\n end",
"def average_rating\n comments.average(:rating).to_f\n end",
"def average_rating\n comments.average(:rating).to_f\n end",
"def average_rating\n comments.average(:rating).to_f\n end",
"def average numbers\n\nend",
"def average(anArray)\n avg = 0\n sum = sum(anArray)\n if (anArray.length > 0) then\n avg = sum / anArray.length\n else\n return 0\n end\nend",
"def mean(options = {})\n c = count(options)\n return nil if c.zero? # if the array is empty will be returned nil\n sum(options) / c\n end",
"def avg_reviews\n @avg = self.reviews.average(:rating) \n @avg ? @avg : 0\nend",
"def average\n return self.sum / self.length.to_f\n end",
"def get_mean\n end",
"def grade_average\n results = Assignment.find_by_sql([\"\n SELECT (sum(grades.grade * assignments.weight)/sum(assignments.weight)) AS total\n FROM assignments\n LEFT JOIN grades ON (assignments.id = grades.assignment_id)\n WHERE assignments.school_id=? and grades.user_id=? and grades.grade >= 0\n GROUP BY grades.user_id\",self.school,self.id])\n return results[0]['total'].to_i unless results.count == 0\n end",
"def average_rating\r\n comments.average(:rating).to_f\r\n end",
"def average\n return self.sum/self.length.to_f\n end",
"def average\n return self.sum/self.length.to_f\n end",
"def average\n return self.sum/self.length.to_f\n end",
"def average\n return self.sum/self.length.to_f\n end",
"def average\n return self.sum/self.length.to_f\n end",
"def average\n return self.sum/self.length.to_f\n end",
"def average_rating\n ratings = book_reviews.pluck(:rating)\n ratings.size.zero? ? nil : (ratings.sum / ratings.size.to_f).round(1)\n end",
"def avg\n only_with('avg', 'DateTime', 'Numeric')\n itms = items.compact\n size = itms.size.to_d\n if type == 'DateTime'\n avg_jd = itms.map(&:jd).sum / size\n DateTime.jd(avg_jd)\n else\n itms.sum / size\n end\n end",
"def average_age\n self.bloggers.average(\"age\").to_i\n end",
"def mean(ary)\n # your implementation here\n avg = ary.reduce(0) {|item,acc| acc + item} / ary.length\n avg\nend",
"def getavg\r\n\t\tif @duration == 0\r\n\t\t\treturn 0\r\n\t\telse\r\n\t\t\treturn ((@miles_driven.to_f / @duration)*60)\r\n\t\tend\r\n\tend",
"def average\n total_sum = @person_array.inject(0){ |sum, p| sum += p.awesomeness } \n size = @person_array.size \n return (total_sum / size)\n end",
"def average(ary)\n ary.sum / ary.length\nend",
"def average_age\n total_age = 0\n @@all.each do |manager|\n total_age += manager.age\n end\n (total_age/ (@@all.length -1))\n end",
"def average\n self.sum / self.length.to_f\n end",
"def average\n self.sum / self.length.to_f\n end",
"def average_rating\n return 'n/a' if reviews.empty?\n #return 0 if reviews.empty?\n (reviews.pluck(:rating).sum / reviews.count.to_f).round(2)\n end",
"def get_mean()\n end",
"def get_average(total_rating, rides)\n\n if rides > 0\n average = total_rating.to_f / rides\n else\n average = 0\n end\n return average\n\nend",
"def team_average\n # This version is implemented as a database AVG operation,\n # but it cannot be eager loaded so it results in an extra\n # database query for each project:\n #\n # avg = students_projects.select(:points).average :points\n # avg ? avg.round : 0\n\n # This version programmatically finds the average of the points:\n #self.reload\n no_of_students = self.students_projects.length\n return 0 if no_of_students == 0\n total = 0\n self.students_projects.each { |s| total += s.points }\n (total / no_of_students).round\n end",
"def mean(ary)\n\tsum=0.0\n\tary.each {|item| sum+=item}\n\tsum/ary.count\nend",
"def get_mean(collection)\n collection_length = collection.length\n total = 0\n total = collection.inject( nil ) { |sum,x| sum ? sum+x : x }\n (total.to_f / collection_length.to_f)\n end",
"def average_stats\n Review.find_by_sql([\"\n SELECT \n ROUND((AVG(rv.rating)::numeric),1) as total_rating,\n ROUND((AVG(rv.coffee_rating)::numeric),1) as total_coffee,\n ROUND((AVG(rv.food)::numeric),1) as total_food,\n ROUND((AVG(rv.noise_level)::numeric),1) as total_noise_level,\n ROUND((AVG(rv.work_friendly)::numeric),1) as total_work_friendly,\n COUNT(rv.rating) as total_reviews_count\n FROM reviews AS rv\n WHERE rv.coffee_shop_id = ?\n \", id]).first\n end",
"def sum(column)\n result = all\n\n if result.any?\n result.inject(0.0) do |acc, record|\n if value = record.public_send(column)\n acc += value\n end\n\n acc\n end\n end\n end",
"def mean\n end",
"def rating_calculation\n ratings_collection = Rating.where(user_id: self.id)\n average = -1\n if !ratings_collection.empty?\n sum = 0\n ratings_collection.each do |r|\n sum += r.rating\n end\n average = (sum.to_f / ratings_collection.count).round\n end\n average\n end",
"def get_avg(num_1, num_2)\n (num_1 + num_2) / 2\nend"
] | [
"0.8124579",
"0.6897474",
"0.66772103",
"0.66442484",
"0.66257405",
"0.6408009",
"0.6398571",
"0.63593835",
"0.63576573",
"0.635402",
"0.63029486",
"0.6258484",
"0.61673945",
"0.6148234",
"0.60933393",
"0.6086343",
"0.60742986",
"0.6063052",
"0.6054823",
"0.6044952",
"0.6020603",
"0.59829515",
"0.59382135",
"0.5896523",
"0.589477",
"0.587977",
"0.58730316",
"0.58602446",
"0.58576965",
"0.58513063",
"0.58310324",
"0.5814919",
"0.58148086",
"0.5808424",
"0.57796335",
"0.5762462",
"0.57349205",
"0.57310313",
"0.57215494",
"0.57167095",
"0.57028323",
"0.57003653",
"0.5694471",
"0.56931406",
"0.5687695",
"0.5682963",
"0.56821895",
"0.5679206",
"0.5674878",
"0.5674235",
"0.5673178",
"0.56621265",
"0.5658858",
"0.5651171",
"0.56500477",
"0.56464005",
"0.5646344",
"0.56354195",
"0.563337",
"0.5626927",
"0.5603962",
"0.558992",
"0.558992",
"0.558992",
"0.558992",
"0.5586763",
"0.5582824",
"0.55821824",
"0.5581223",
"0.5575023",
"0.55635613",
"0.55620825",
"0.5562063",
"0.5556212",
"0.5556212",
"0.5556212",
"0.5556212",
"0.5556212",
"0.5556212",
"0.5548314",
"0.5547877",
"0.55359954",
"0.5535877",
"0.5527636",
"0.5527148",
"0.5526742",
"0.5523012",
"0.55152076",
"0.55152076",
"0.5506855",
"0.5503548",
"0.5502347",
"0.5501187",
"0.549925",
"0.5497864",
"0.5493193",
"0.54928184",
"0.54926854",
"0.5489955",
"0.54873985"
] | 0.80437046 | 1 |
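A short Ruby sketch of the average calculation described above, again assuming a hypothetical Person model with age and country columns (the values in the comments are illustrative):

  Person.average(:age)                  # => 35.8  (nil when there are no rows)
  Person.where(minor: false).average(:age)
  Person.group(:country).average(:age)  # => { "DE" => 41.2, "FR" => 38.0 }

With a grouped relation the result is a Hash keyed by the grouped values; otherwise it is a single numeric value, or nil for an empty relation.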
Same as average but performs the query asynchronously and returns an ActiveRecord::Promise | def async_average(column_name)
async.average(column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def average_student_gpa\n \"SELECT AVG(gpa) from students;\"\nend",
"def async_sum(identity_or_column = nil)\n async.sum(identity_or_column)\n end",
"def calculate(operation, column_name)\n operation = operation.to_s.downcase\n\n if @none\n case operation\n when \"count\", \"sum\"\n result = group_values.any? ? Hash.new : 0\n return @async ? Promise::Complete.new(result) : result\n when \"average\", \"minimum\", \"maximum\"\n result = group_values.any? ? Hash.new : nil\n return @async ? Promise::Complete.new(result) : result\n end\n end\n\n if has_include?(column_name)\n relation = apply_join_dependency\n\n if operation == \"count\"\n unless distinct_value || distinct_select?(column_name || select_for_count)\n relation.distinct!\n relation.select_values = [ klass.primary_key || table[Arel.star] ]\n end\n # PostgreSQL: ORDER BY expressions must appear in SELECT list when using DISTINCT\n relation.order_values = [] if group_values.empty?\n end\n\n relation.calculate(operation, column_name)\n else\n perform_calculation(operation, column_name)\n end\n end",
"def call\n query = <<-SQL\n WITH events_first_submit as (\n SELECT DISTINCT ON (enrollment_id) *\n FROM events WHERE name = 'submitted' ORDER BY enrollment_id, created_at DESC\n )\n SELECT TO_CHAR(\n AVG (validation_duration),\n 'FM999999999'\n )\n FROM (\n SELECT\n enrollments.id, events_stop.created_at AS done_at, events_first_submit.created_at AS submitted_at,\n DATE_PART('days', events_stop.created_at - events_first_submit.created_at) AS validation_duration\n FROM enrollments\n INNER JOIN\n events AS events_stop ON events_stop.enrollment_id = enrollments.id\n AND events_stop.name IN ('validated', 'refused')\n INNER JOIN\n events_first_submit ON events_first_submit.enrollment_id = enrollments.id\n WHERE status IN ('validated', 'refused')\n AND #{@filter_by_target_api_criteria}\n ) e;\n SQL\n\n ActiveRecord::Base\n .connection\n .execute(query)\n .getvalue(0, 0)\n end",
"def get_average\n @average ||= calculate_average\n end",
"def get_avg\n recentReviewed = get_user_cards(Result.all)\n filteredDates = get_dates(recentReviewed)\n totalScore = filteredDates.reduce(0) {|curr,n| curr + n.score} \n totalScore/filteredDates.count \n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def avg_score_vote\n sql=<<-SQL\n select\n sum(\n case action\n when 1 then 0\n when 2 then 0\n when 3 then 1\n when 4 then 1\n when 5 then 1\n end\n ) / count(*) as sant\n from votes\n SQL\n (ActiveRecord::Base.connection.execute(sql).fetch_row[0].to_f * 100).round\n end",
"def aggregate\n #response = Result.collection.map_reduce(self.map_fn(), _reduce(), :raw => true, :out => {:inline => true}, :query => {:execution_id => id})\n response = Result.where(execution_id: id).map_reduce(self.map_fn(), self.query.reduce).out(inline: true).raw()\n results = response['results']\n if results\n self.aggregate_result = {}\n results.each do |result|\n result = prettify_generated_result(result) if self.query.generated? && result['value']['rereduced']\n self.aggregate_result[result['_id']] = result['value']\n end\n save!\n end\n end",
"def async_result()\n #This is a stub, used for indexing\n end",
"def average_karma\n average = QuestionsDatabase.instance.execute(<<-SQL,@id,@id)\n SELECT (CASE\n WHEN numquestions.questions = 0\n THEN NULL\n ELSE\n numlikes.likes/numquestions.questions\n END) avg\n FROM\n ( SELECT COUNT(*) likes\n FROM question_likes\n WHERE question_likes.user_id = (?) ) numlikes\n CROSS INNER JOIN\n ( SELECT COUNT(*) questions\n FROM questions\n WHERE questions.user_id = (?) ) numquestions\n SQL\n average[\"avg\"]\n end",
"def rating_calculation\n ratings_collection = Rating.where(user_id: self.id)\n average = -1\n if !ratings_collection.empty?\n sum = 0\n ratings_collection.each do |r|\n sum += r.rating\n end\n average = (sum.to_f / ratings_collection.count).round\n end\n average\n end",
"def mean_response(records)\n if records.length == 0\n return 0.0\n end\n\n return records.map {|r| r.response_time}.reduce(:+) / records.length.to_f\nend",
"def average_population\n\n sum = 0\n count = 0\n\n @query.get_graph.each_key do |city|\n population = get_city_info(city,\"population\")\n sum+=population\n count+=1\n end\n\n return sum/count\n\n end",
"def average(producer)\n total = 0\n @values.each do |result|\n total += producer.call(result)\n end\n avg = total / @values.length\n return avg.to_f\n end",
"def calc_average!\n update!(avg_rating: ratings.average(:rating))\n end",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def fetch(name, opts = {})\n return [] unless key = get_key(name)\n\n # TODO API inconsistent return format again:\n return last(name) if opts[:single] == 'last'\n return previous(name) if opts[:single] == 'previous'\n\n return fetch_timespans(key, opts) if opts[:as] == 'timespans'\n return fetch_bool(key, opts) if key[:type] == 'boolean'\n\n\n # below only numeric type\n scope = @db[:numeric_data_points].where(key_id: key[:id])\n scope = scope.where('time > :since', since: opts[:since]) if opts[:since]\n scope = scope.where('time < :until', until: opts[:until]) if opts[:until]\n\n if opts[:step]\n s = step opts[:step] # see Engine::Base#step\n # TODO use date_trunc for pg (main use case)\n if @db.adapter_scheme == :postgres\n f_time = \"floor((extract(epoch from time::timestamp with time zone)/#{s[:span]}))*#{s[:span]}\"\n scope = scope.select(Sequel.lit \"to_timestamp(#{f_time}) AS time\").group(Sequel.lit f_time).select_append { avg(value_avg).as(value_avg) }\n else # sqlite\n f_time = \"(strftime('%s',time)/#{s[:span]})*#{s[:span]}\"\n scope = scope.select(Sequel.lit \"datetime(#{f_time}, 'unixepoch') AS time\").group(Sequel.lit f_time).select_append { avg(value_avg).as(value_avg) }\n end\n end\n\n scope = scope.order(Sequel.desc(:time)).limit(100) # TODO limit option and order, these are defaults for testing\n #puts \"SQL: #{scope.sql}\"\n rows = scope.all\n rows.reverse.map do |r|\n [time_wrap(r[:time]), r[:value_avg]]\n end\n end",
"def average_stats\n Review.find_by_sql([\"\n SELECT \n ROUND((AVG(rv.rating)::numeric),1) as total_rating,\n ROUND((AVG(rv.coffee_rating)::numeric),1) as total_coffee,\n ROUND((AVG(rv.food)::numeric),1) as total_food,\n ROUND((AVG(rv.noise_level)::numeric),1) as total_noise_level,\n ROUND((AVG(rv.work_friendly)::numeric),1) as total_work_friendly,\n COUNT(rv.rating) as total_reviews_count\n FROM reviews AS rv\n WHERE rv.coffee_shop_id = ?\n \", id]).first\n end",
"def get_mean\n end",
"def average_karma\n my_questions = QuestionsDatabase.instance.execute(<<-SQL)\n SELECT DISTINCT\n (CAST(COUNT(question_likes.id) AS FLOAT) / COUNT(DISTINCT(questions.id))) AS average_likes\n FROM\n questions\n LEFT OUTER JOIN\n question_likes ON question_likes.questions_id = questions.id\n -- JOIN\n -- users ON questions.user_id = users.id\n WHERE\n question_likes.user_id = #{@id}\n SQL\n my_questions\n end",
"def average_ask\n asks = @asks.map { |x| x.fetch(:price) }\n asks.inject(:+) / asks.count\n end",
"def get_mean()\n end",
"def mean\n @sum / @count\n end",
"def getResultTable(_nth = :all, _sync = false)\n _nthForLoop = ((_nth == :average || _nth == :stat) ? :all : _nth) ;\n if(done?(_sync, _nthForLoop)) then\n _resultList = [] ;\n doWithNthRun(_nthForLoop){|_run|\n if(_run.status == :finished) then\n _resultList.push(_run.result) ;\n end\n }\n if(_nth == :all) then\n return _resultList ;\n elsif(_nth == :average || _nth == :stat) then\n _stat = {} ;\n _resultList.each{|_result|\n _result.each{|_key, _value|\n _stat[_key] = Stat::StatInfo.new() if(_stat[_key].nil?) ;\n _stat[_key].put(_value) ;\n }\n }\n if(_nth == :average) then\n _ave = {} ;\n _stat.each{|_key, _value| _ave[_key] = _value.average() ;}\n return _ave ;\n else\n return _stat ;\n end\n else\n return _resultList.first ;\n end\n else\n return nil ;\n end\n end",
"def mean\n Statistics.mean @data\n end",
"def average\n return nil if empty?\n sum = n = 0\n if block_given?\n each { |x| n += 1; sum += (yield x) }\n else\n each { |x| n += 1; sum += x }\n end\n sum.to_f / n\n end",
"def average_karma\n result = QuestionsDatabase.instance.execute(<<-SQL, self.id)\n SELECT\n COUNT(question_likes.user_id) / CAST (COUNT(DISTINCT question_likes.question_id) AS FLOAT)\n FROM\n question_likes\n LEFT OUTER JOIN\n questions ON question_likes.question_id = questions.id\n JOIN\n users ON users.id = questions.author_id\n WHERE\n users.id = 0\n SQL\n end",
"def running_average; end",
"def average\nlift_Average = @@all.map do |lyft|\n lyft.lift_total\nend\nlift_Average.sum / lift_Average.size\nend",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def exercise3\n @content = ActiveRecord::Base.connection.execute(\"\n SELECT\n gr.name as group,\n u.name as name,\n SUM(m.mapviews) as views,\n CONCAT(CAST(ROUND((SUM(m.mapviews)*100)/SUM(SUM(m.mapviews)) OVER (PARTITION BY gr.name),2) as text), '%') as percent_of_group_views\n FROM (((users as u\n INNER JOIN groups_users as gu ON u.id=gu.user_id)\n INNER JOIN groups as gr ON gr.id = gu.group_id)\n INNER JOIN maps as m ON m.user_id = u.id)\n GROUP BY (gr.name, u.name)\n ORDER BY gr.name ASC, percent_of_group_views DESC;\");\n\n @results3 = []\n\n index = 0\n @content.each do |r|\n @results3[index] = Result3.new r\n index = index + 1;\n end\n\n return @results3\n end",
"def average(range = nil)\n return (range == nil ? self.transactions : self.transactions.all(:date => range)).avg(:amount)\n end",
"def calculate_and_update_post_avg_rating(payload)\n Try do\n post_repo.update_avg_rating(payload)\n end.to_result\n end",
"def investor_avg_rating_by_id(id)\n return run_sql(\"SELECT to_char(AVG(rating),'9D9') FROM investor_ratings WHERE investor_id = #{id};\")\nend",
"def mean\n get_mean\n end",
"def mean\n redis.hget(bucket_key, RedisBackend::MEAN_FIELD).to_f\n end",
"def mean\n sum / count.to_f\n end",
"def average_duration(contacts, period_start_field, period_end_field)\n contacts.select(\"ROUND(AVG(EXTRACT(EPOCH FROM contacts.#{period_end_field}- contacts.#{period_start_field}))) AS avg\")[0]['avg'] || 0\n end",
"def update_post_avg\n post.calc_average!\n end",
"def get_rating_averages\n set = GumRatingRelationship.where(:gum_id => self.id)\n return([set.average(:rank_1), set.average(:rank_2), set.average(:rank_3), set.average(:rank_4), set.average(:rank_5) ])\n end",
"def offer_average\n if self.offers.any?\n return self.offers.all.sum(&:figure)/self.offers.count\n else\n return \"No offers yet...\"\n end\n end",
"def avg_rating\n visits.avg_rating\n end",
"def async_count(column_name = nil)\n async.count(column_name)\n end",
"def calc_mean\n $mean = Emoneyoutstanding.average(:amount, :conditions => \"date > '#{Date.current - 180}'\")\n return $mean\n end",
"def running_average=(_arg0); end",
"def calculate_average\n return 0 unless reviews.size.positive?\n avg = self.reviews.average(:rating) #.to_f.round(2) # * 100\n update_column(:rating, avg)\n end",
"def calculate_metric_average(model_class, node_id, path, timestamp)\n average = model_class.find_by_node_id_and_path_and_timestamp!(node_id, path, timestamp)\n average.set(:counter => average.total / average.count)\n end",
"def mean()\n\t\taverage = 0.0\n\t\t@results.each do |row|\n\t\t\taverage += (row[\"rating\"].to_f - row[\"predicted\"]).abs\n\t\tend\n\n\t\treturn average/results.length\n\tend",
"def average_price\n populate_price_data unless @average_price\n @average_price\n end",
"def aggregated_over_time_query\n # TODO Remember to implement permitted parameters here\n query = @grouping_class.new(sanitized_attributes, params)\n @aggregated_over_time_data = Rails.cache.fetch(['aggregated_over_time_data', params], expires_in: 1.week) do\n query.aggregated_over_time_data\n end\n\n render json: @aggregated_over_time_data\n end",
"def mean\n @mean\n end",
"def aggregates\n Rails.cache.fetch(\"aggregates_#{interval}_#{cache_time}\", expires_in: self.cache_time) {\n ActiveRecord::Base.connection.exec_query(\"\n select\n stddev(sum_downvotes) as stddev,\n sum(sum_downvotes) as sum,\n avg(sum_downvotes) as avg,\n avg(n_comments) as n_comments,\n count(*) as n_commenters\n from (\n select\n sum(downvotes) as sum_downvotes,\n count(*) as n_comments\n from comments join users on comments.user_id = users.id\n where\n (comments.created_at >= '#{period}') and\n users.banned_at is null and\n users.deleted_at is null\n GROUP BY comments.user_id\n ) sums;\n \").first.symbolize_keys!\n }\n end",
"def query(&block)\n @delegate.query(block)\n end",
"def average_rating\n sum = self.queue_items.map {|item| item.rating}.reduce(:+)\n length = self.queue_items.length\n sum / length\n end",
"def user_average_rating(user)\n #av = promedio (avarage), counter = contador(para calcular el total de reviews realizada)\n av, counter = 0.0, 0.0\n \n Review.where(user_id: user.id).each_with_index do |review, i|\n if review.rating\n av = av + review.rating\n counter = counter + 1.0\n end\n end\n\n av / counter\n end",
"def calculate_house_avgLoad(s_i)\n query = \"SELECT load FROM AveragePlugLoads WHERE house_id = %d \" \\\n \"AND slice_index = %d\" % [house_id, s_i]\n\n results = execute_query(query)\n loads = results.map{|row| row[\"load\"]}\n puts \"EXECUTED: #{query}, GOT #{loads}\" if DEBUG\n sum(loads)\n end",
"def run(&block)\n @repository.query(self, &block)\n end",
"def execute_simple_calculation(operation, column_name, distinct) #:nodoc:\n # LIMIT 0 clauses on aggregate queries will return a 0 result\n # no need to query salesforce for that\n return 0 if has_limit_or_offset? && limit_value == 0\n\n if operation == \"count\" && (column_name == :all && distinct || has_limit_or_offset?)\n # Shortcut when limit is zero.\n \n query_builder = build_count_subquery(spawn, column_name, distinct)\n else\n # PostgreSQL doesn't like ORDER BY when there are no GROUP BY\n relation = unscope(:order).distinct!(false)\n\n column = aggregate_column(column_name)\n select_value = operation_over_aggregate_column(column, operation, distinct)\n\n relation.select_values = [select_value]\n\n query_builder = relation.arel\n end\n\n result = skip_query_cache_if_necessary { @klass.connection.select_all(query_builder, nil) }\n row = result.first\n\n value = row && row.fetch(\"expr0\")\n\n type = type_for(column_name)\n \n type_cast_calculated_value(value, type, operation)\n end",
"def sync\n raise \".sync can only be used on the client\" if Volt.client?\n\n result = nil\n error = nil\n\n self.then do |val|\n result = val\n end.fail do |err|\n error = err\n end\n\n if error\n err_str = \"Exception in Promise at .sync: #{error.inspect}\"\n err_str += error.backtrace.join(\"\\n\")\n Volt.logger.error(err_str)\n fail error\n else\n return result\n end\n end",
"def grade_average(school_id,user_id)\n results = Assignment.find_by_sql([\"\n SELECT (sum(grades.grade * assignments.weight)/sum(assignments.weight)) AS total\n FROM assignments\n LEFT JOIN grades ON (assignments.id = grades.assignment_id)\n WHERE assignments.school_id=? and grades.user_id=?\n GROUP BY grades.user_id\",school_id,user_id])\n grade = results[0]['total'].to_i unless results.count == 0\n pending = \"pending\"\n return grade || pending\n end",
"def perform_query(gql, period, query)\n behavior = query.behavior\n\n return behavior.fallback_value unless behavior.period_supported?(period)\n\n value = gql.public_send(:\"query_#{ period }\", query)\n\n # Rails 4.1 JSON encodes BigDecimal as a string. This is not part of\n # the ETEngine APIv3 spec.\n value = value.to_f if value.is_a?(BigDecimal)\n\n behavior.process_result(nan?(value) ? 0.0 : value)\n rescue Exception => exception\n # TODO Exception is *way* too low level to be rescued; we could do\n # with a GraphError exception for \"acceptable\" graph errors.\n @errors.push(\"#{ query.key }/#{ period } - #{ exception.message } | \" \\\n \"#{ exception.backtrace.join(\"\\n\") }\")\n nil\n end",
"def run_query(q)\n return sky_table.query(q)\n end",
"def average_wait_time\n self.reviews.average(:wait_time).to_i\n end",
"def grade_average\n results = Assignment.find_by_sql([\"\n SELECT (sum(grades.grade * assignments.weight)/sum(assignments.weight)) AS total\n FROM assignments\n LEFT JOIN grades ON (assignments.id = grades.assignment_id)\n WHERE assignments.school_id=? and grades.user_id=? and grades.grade >= 0\n GROUP BY grades.user_id\",self.school,self.id])\n return results[0]['total'].to_i unless results.count == 0\n end",
"def team_average\n # This version is implemented as a database AVG operation,\n # but it cannot be eager loaded so it results in an extra\n # database query for each project:\n #\n # avg = students_projects.select(:points).average :points\n # avg ? avg.round : 0\n\n # This version programmatically finds the average of the points:\n #self.reload\n no_of_students = self.students_projects.length\n return 0 if no_of_students == 0\n total = 0\n self.students_projects.each { |s| total += s.points }\n (total / no_of_students).round\n end",
"def avg(field)\n total = sum(field)\n total ? (total / count) : nil\n end",
"def find_avg(offers)\n if offers.count != 0\n temp =[]\n offers[0].each{|i|\n temp << i.price\n }\n temp.sum.to_f/temp.count\n else\n 0\n end\n\n end",
"def avg\n total_time = 0\n for url in self.keys\n total_time += self[url]\n end\n return total_time / self.size\n end",
"def execute(async)\n f = Fiber.current\n begin\n conn = acquire(f)\n conn.acquired_for_connection_pool += 1\n yield conn\n ensure\n conn.acquired_for_connection_pool -= 1\n conn.run_postponed_queries if conn.acquired_for_connection_pool == 0\n release(f) if !async && conn.acquired_for_connection_pool == 0\n end\n end",
"def get_db_aggregation\n db_data_all = []\n aggregation = @thermostat.readings.pluck('Avg(temperature)', 'Min(temperature)', 'Max(temperature)', 'Avg(humidity)', 'Min(humidity)', 'Max(humidity)', 'Avg(battery_charge)', 'Min(battery_charge)', 'Max(battery_charge)').first\n unless aggregation.empty?\n db_data_all << {\"temperature\" => {\"avg\" => aggregation[0].round(2), \"min\" => aggregation[1], \"max\" => aggregation[2]}}\n db_data_all << {\"humidity\" => {\"avg\" => aggregation[3].round(2), \"min\" => aggregation[4], \"max\" => aggregation[5]}}\n db_data_all << {\"battery_charge\" => {\"avg\" => aggregation[6].round(2), \"min\" => aggregation[7], \"max\" => aggregation[8]}}\n end\n return db_data_all\n end",
"def average_karma\n QuestionsDatabase.get_first_value(<<-SQL, self.id)\n SELECT\n CAST(COUNT(question_likes.id) AS FLOAT) / \n COUNT(DISTINCT(questions.id)) AS avg_karma\n FROM\n questions\n LEFT JOIN\n question_likes\n ON\n question_likes.question_id = questions.id\n WHERE\n questions.author_id = ?\n SQL\n end",
"def average(column_name)\n calculate(:average, column_name)\n end",
"def average()\n if(@countN > 0) then\n return @sum / @countN ;\n else\n return 0.0 ;\n end\n end",
"def calculate\n\t\taverage_rank\n\t\tfound_percent\n\tend",
"def average\n return @@average_times.inject(:+).to_f / @@average_times.size\n end",
"def average_bid\n bids = @bids.map { |x| x.fetch(:price) }\n bids.inject(:+) / bids.count\n end",
"def average_karma\n average_karma = QuestionsDBConnection.instance.execute(<<-SQL, @id)\n SELECT\n COUNT(questions_id), COUNT(*)\n FROM\n users\n LEFT OUTER JOIN\n questions ON questions.users_id = users.id\n LEFT OUTER JOIN\n question_likes ON question_likes.questions_id = questions.id\n WHERE\n users.id = ?\n\n SQL\n\n number_of_questions_asked_by_user = average_karma.first.values[0]\n number_of_likes_on_questions = average_karma.first.values[1]\n\n number_of_likes_on_questions.fdiv(number_of_questions_asked_by_user)\n end",
"def perform\n qe = QueryExecutor.new(map, reduce,functions, query_id.to_s, filter)\n results = qe.execute\n result = Result.new results\n result.query = Query.find(query_id)\n result.save!\n end",
"def mean\n return 0.0 if @count.zero?\n return @sum / @count\n end",
"def mean\n end",
"def index\n result = []\n db_data = get_db_aggregation\n redis_cache = get_cache_aggregation\n\n if redis_cache.empty?\n result = db_data\n elsif db_data.empty?\n result = redis_cache\n else\n db_data.each_with_index do |val,i|\n val.each do |k,value|\n avg_val = (value[\"avg\"].to_f + redis_cache[i][k][\"avg\"].to_f) / 2\n min_val = [value[\"min\"].to_f, redis_cache[i][k][\"min\"].to_f].min\n max_val = [value[\"max\"].to_f, redis_cache[i][k][\"max\"].to_f].max\n result << {k => {\"avg\" => avg_val, \"min\" => min_val, \"max\" => max_val} }\n end \n end\n end\n\n render json: {thermostat_data: result}\n end",
"def get_avg(num_1, num_2)\n (num_1 + num_2) / 2\nend",
"def fastest_players_avg\n options = {\n :select => \"to_char(avg(instances.duration), '99999990.99') as field\",\n :order => 'field ASC'\n }\n\n render_users params[:name], options\n end",
"def find_average \n result = array.sum(0.0)/array.size\n return result\n end",
"def show\n @study = Study.find(params[:id]) \n @gpa = @study.progresses.where('stage = 3').select(\"AVG(score) AS gpa\").first.gpa\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @study }\n end\n end",
"def compute\n \tself.start_time = DateTime.now\n\n #call API request methods to fetch products\n\t\tApiRequestLog.process(self, endpoint)\n\n\t\tproducts = if category.present?\n\t\t\tProduct.where(category: category)\n\t\telse\n\t\t\tProduct.all\n\t\tend\n\n #compute average cubic weight\n\t\tif products.count > 0\n\t\t\t_total_cubic_weight = products.inject(0) {|r, p| r += p.volume.to_f * conversion_factor} \n\t\t\tself.average_cubic_weight = _total_cubic_weight.to_f / products.count \n\t\tend\n\n #update tracking data\n\t\tself.finished = true\n\t\tself.end_time = DateTime.now\n\t\tself.duration = end_time - start_time\n\t\tself.save(validate: false)\n\n #broadcast to client side to notify with calcualted average cubic weight of a related category\n if category.present?\n ActionCable.server.broadcast \"api_request_notifications_channel\", weight: \"#{average_cubic_weight} kg\", category: category\n else\n ActionCable.server.broadcast \"api_request_notifications_channel\", weight: \"#{average_cubic_weight} kg\"\n end\n\trescue\n end",
"def mean()\n sum = 0\n @difference.each do |item|\n sum += item\n end\n @mean = sum / @difference.length\n return @mean\n end",
"def getAverage(arg1 , arg2 , arg3, arg4)\n return (arg1+arg2+arg3+arg4)/4.0\n end",
"def average_for_attribute(\n attribute, include_domestic_consumption = true\n )\n attribute_type = attribute.class.name.demodulize.downcase\n value_table = \"node_#{attribute_type}s\"\n # rubocop:disable Layout/LineLength\n query = basic_query(attribute, include_domestic_consumption).\n select(\n \"nodes.id AS node_id\",\n \"AVG(#{value_table}.value) OVER (PARTITION BY #{value_table}.year) AS value\",\n \"#{value_table}.year\"\n )\n # rubocop:enable Layout/LineLength\n\n Node.from(\"(\" + query.to_sql + \") s\").\n select(\"s.*\").\n where(\"s.node_id\" => @node.id).\n order(nil)\n end",
"def average_age\n #already have all_followers to get all the followers for this cult.\n #just need to iterate through and grab the age then sum and count\n \n age = all_followers.map{|follower| follower.age}\n age.sum / age.count.to_f\n end",
"def get_aggregate aggregate_query\n ensure_not_closed!\n ensure_service!\n\n return enum_for :get_aggregate, aggregate_query unless block_given?\n\n results = service.run_aggregate_query aggregate_query.parent_path,\n aggregate_query.to_grpc,\n transaction: transaction_or_create\n results.each do |result|\n extract_transaction_from_result! result\n next if result.result.nil?\n yield AggregateQuerySnapshot.from_run_aggregate_query_response result\n end\n end",
"def mean\n stats.mean\n end",
"def game_ratio(nick)\n pa = $db[:player_action]\n udb = $db[:uno]\n\n games = udb.where(nick: nick).all[0]\n\n dataset = $db[\"SELECT count(game) FROM player_action pa where pa.action = 0 and pa.player = ?\n and exists (select game from player_action pa2 where pa.game = pa2.game and pa2.action = 0 and pa2.player like 'unobot%')\n and exists (select 1 from games where id = pa.game and end is not null)\", nick]\n puts dataset.all[0].to_a[0][1].to_f, games[:games].to_f\n dataset.all[0].to_a[0][1].to_f / games[:games].to_f\nend",
"def average\n #Split Temporary Variable value -> sum\n #sum = 0\n #Replace Loop with Collection Closure Method\n #@grades.each do |grade|\n #sum += grade\n #end\n #Split Temporary Variable value -> average\n #average = sum / @grades.size.to_f\n \n #Replace Loop with Collection Closure Method\n average = @grades.sum / @grades.size.to_f\n average\n end",
"def average_rating\n (ratings.sum(:rating).to_f / num_ratings).round(1)\nend",
"def run_eager\n root_operation = query.selected_operation\n root_op_type = root_operation.operation_type || \"query\"\n root_type = schema.root_type_for_operation(root_op_type)\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = @response\n runtime_object = root_type.wrap(query.root_value, context)\n runtime_object = schema.sync_lazy(runtime_object)\n\n if runtime_object.nil?\n # Root .authorized? returned false.\n @response = nil\n else\n call_method_on_directives(:resolve, runtime_object, root_operation.directives) do # execute query level directives\n gathered_selections = gather_selections(runtime_object, root_type, root_operation.selections)\n # This is kind of a hack -- `gathered_selections` is an Array if any of the selections\n # require isolation during execution (because of runtime directives). In that case,\n # make a new, isolated result hash for writing the result into. (That isolated response\n # is eventually merged back into the main response)\n #\n # Otherwise, `gathered_selections` is a hash of selections which can be\n # directly evaluated and the results can be written right into the main response hash.\n tap_or_each(gathered_selections) do |selections, is_selection_array|\n if is_selection_array\n selection_response = GraphQLResultHash.new(nil, nil, false)\n final_response = @response\n else\n selection_response = @response\n final_response = nil\n end\n\n @dataloader.append_job {\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = selection_response\n # This is a less-frequent case; use a fast check since it's often not there.\n if (directives = selections[:graphql_directives])\n selections.delete(:graphql_directives)\n end\n call_method_on_directives(:resolve, runtime_object, directives) do\n evaluate_selections(\n runtime_object,\n root_type,\n root_op_type == \"mutation\",\n selections,\n selection_response,\n final_response,\n nil,\n )\n end\n }\n end\n end\n end\n delete_all_interpreter_context\n nil\n end",
"def average_rating(&block)\n total = 0\n countable_ratings = block ? ratings.select(&block) : ratings\n countable_ratings.each do |rating|\n total += rating.rating\n end\n average_rating = countable_ratings.empty? ? 0 : (0.0 + total) / countable_ratings.size\n # only 1 decimal.\n [(0.0 + (average_rating * 10).round) / 10, countable_ratings.size]\n end"
] | [
"0.5835753",
"0.547055",
"0.5469689",
"0.54238045",
"0.52602386",
"0.52592427",
"0.52137923",
"0.52137923",
"0.52137923",
"0.5198025",
"0.51939225",
"0.51723516",
"0.51477414",
"0.5140449",
"0.51170796",
"0.51098645",
"0.50977683",
"0.5089294",
"0.50892437",
"0.5078895",
"0.5077401",
"0.50724703",
"0.5068384",
"0.50679713",
"0.506457",
"0.50496143",
"0.5023374",
"0.5017445",
"0.50032204",
"0.4999779",
"0.49987853",
"0.4988147",
"0.49870655",
"0.49841416",
"0.49757585",
"0.49721912",
"0.495517",
"0.4942799",
"0.493194",
"0.49249196",
"0.49187946",
"0.491106",
"0.49005055",
"0.4896179",
"0.4890151",
"0.48837304",
"0.4874725",
"0.48720446",
"0.48533034",
"0.48531544",
"0.48505554",
"0.4840033",
"0.48329568",
"0.48286515",
"0.48230603",
"0.48016036",
"0.47985378",
"0.47899386",
"0.47862077",
"0.47748715",
"0.4774421",
"0.477204",
"0.47716698",
"0.4752984",
"0.47457469",
"0.47393662",
"0.47389132",
"0.47379482",
"0.4725643",
"0.4723315",
"0.47165722",
"0.4712618",
"0.47053474",
"0.47030717",
"0.46989185",
"0.4697362",
"0.46957588",
"0.46941903",
"0.46940163",
"0.46887234",
"0.46874115",
"0.46865803",
"0.46863037",
"0.4685268",
"0.4681547",
"0.4674695",
"0.4670345",
"0.4668752",
"0.46687117",
"0.46653998",
"0.46653226",
"0.46591598",
"0.4657617",
"0.46545613",
"0.46541646",
"0.4648421",
"0.464667",
"0.46465597",
"0.4636434",
"0.46352965"
] | 0.72706807 | 0 |
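The async variants can be combined to issue several aggregate queries concurrently. A sketch under the same assumptions as the earlier async_count example (hypothetical Person model, Rails 7.1+, async query executor configured):

  # Each call returns an ActiveRecord::Promise right away, so the three
  # aggregates can run concurrently on the background pool.
  avg_age = Person.async_average(:age)
  total   = Person.async_count
  min_age = Person.async_minimum(:age)

  # #value blocks per promise only until that particular result is ready.
  stats = { average: avg_age.value, count: total.value, minimum: min_age.value }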
Calculates the minimum value on a given column. The value is returned with the same data type as the column, or +nil+ if there's no row. See calculate for examples with options. Person.minimum(:age) => 7 | def minimum(column_name)
calculate(:minimum, column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def min(column)\n _all_with_present_column(column).min\n end",
"def min(args)\n col = column field: args[:field]\n col.map! {|item| item = item.to_f} \n col.min\n end",
"def min(attr)\n column(attr).min\n end",
"def get_min()\n @data.min\n end",
"def minimum(attribute_name, options = {})\n calculate(:min, attribute_name, options)\n end",
"def min (row_num)\n row = @rows[row_num]\n min = row[0]\n row.each do |num|\n if min == 0 then\n min = num\n end\n if (num < min) && (num != 0) then\n min = num\n end\n end\n return min\n end",
"def smallest_column\n # Slow but concise version of this method:\n #return columns.min_by &:size\n\n column = smallest = right\n min_size = column.size\n while true\n column = column.right\n return smallest if column == self\n \n if column.size < min_size\n smallest, min_size = column, column.size\n return smallest if min_size == 0\n end\n end\n end",
"def find_min()\r\n self.min\r\n end",
"def min(field = nil)\n block_given? ? super() : aggregates(field)[\"min\"]\n end",
"def min\n min = get(0,0)\n\tfor i in 0...@filas\n for j in 0...@columnas\n if (get(i,j) < min)\n min = get(i,j)\n end\n end\n end\n min\n end",
"def minimum\n object.minimum.to_f\n end",
"def min\n @min || 0;\n end",
"def minimum_value\n @minimum_value || store.min\n end",
"def async_minimum(column_name)\n async.minimum(column_name)\n end",
"def get_min()\n @min\n end",
"def min\n to_a.min\n end",
"def get_min\n @min ||= calculate_min\n end",
"def min(field, opts={})\n opts = ::Hashie::Mash.new(opts)\n all(opts).inject(nil) do |min, item|\n val = item.send(field)\n min = val if !val.nil? && (min.nil? || val < min)\n min\n end\n end",
"def casted_minimum\n minimum.blank? ? nil : (qtype_name == \"decimal\" ? minimum : minimum.to_i)\n end",
"def get_min()\n @min \n end",
"def get_min()\n end",
"def get_min\n @min\n end",
"def min\n return super if super.nil?\n (numeric_type == 'Integer') ? super.to_i : super.to_f\n end",
"def min() end",
"def min\n @v.compact.min\n rescue\n nil\n end",
"def validation_min\n validation = validations? && validations.find do |validation|\n validation.kind == :numericality\n end\n\n if validation\n # We can't determine an appropriate value for :greater_than with a float/decimal column\n raise IndeterminableMinimumAttributeError if validation.options[:greater_than] && column? && [:float, :decimal].include?(column.type)\n\n if validation.options[:greater_than_or_equal_to]\n return (validation.options[:greater_than_or_equal_to].call(object)) if validation.options[:greater_than_or_equal_to].kind_of?(Proc)\n return (validation.options[:greater_than_or_equal_to])\n end\n\n if validation.options[:greater_than]\n return (validation.options[:greater_than].call(object) + 1) if validation.options[:greater_than].kind_of?(Proc)\n return (validation.options[:greater_than] + 1)\n end\n end\n end",
"def min\n only_with('min', 'NilClass', 'Numeric', 'String', 'DateTime')\n items.compact.min\n end",
"def lowest_student_gpa\n \"SELECT MIN(gpa) FROM students\"\nend",
"def minimum\n return @minimum\n end",
"def min_value\n if @head.nil?\n return nil\n else\n if head.left\n min_value = min_search(head.left).data\n else\n min_value = head.data\n end\n end\n return min_value\n end",
"def find_minimum_value\n if self.left_child\n self.left_child.find_minimum_value\n else\n self.value\n end\n end",
"def min\n if min_element.kind_of? Array\n min_element.first\n else\n min_element\n end\n end",
"def odb_min\n \"min(#{to_s})\"\n end",
"def find_min_value(array)\n return array.min\nend",
"def min(&block)\n flag = true # 1st element?\n result = nil\n self.each{|*val|\n val = val.__svalue\n if flag\n # 1st element\n result = val\n flag = false\n else\n if block\n result = val if block.call(val, result) < 0\n else\n result = val if (val <=> result) < 0\n end\n end\n }\n result\n end",
"def min\n return @t_min\n end",
"def min; end",
"def min; end",
"def min\n @store.peek[:min] unless empty?\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def get_min()\n # smallest = Float::INFINITY\n # @stack.each do |item|\n # if item < smallest\n # smallest = item\n # end\n # end\n # return smallest\n @stack.min\n end",
"def min\n # Establecemos valor del primer elemento\n min = @matrix[0][0]\n i = 0\n \n # Fila a fila actualizando el valor minimo\n while (i < @fil)\n j = 0\n while (j < @col)\n if (@matrix[i][j] < min)\n min = @matrix[i][j]\n end\n j += 1\n end\n i += 1\n end\n min\n end",
"def minimum(arr)\n m = arr.min\n m\n end",
"def min\n end",
"def min\n end",
"def min(field)\n grouped(:min, field.to_s, Javascript.min)\n end",
"def better_my_min\n min = self.first\n self.each do |el|\n min = el if el < min\n end\n min\n end",
"def minimum_value\n current = self\n \n while current.left_child != nil do\n current = current.left_child\n end\n \n return current.data\n end",
"def min( value )\n if value < self\n value\n else\n self\n end\n end",
"def my_min(array)\n\t\n\tarray.min\n\t\nend",
"def min_point\n # generate the bounding box if not already done\n bounding_box\n # return the min\n @min\n end",
"def find_min\r\n return nil if !@head\r\n cursor = @head\r\n min = cursor.data\r\n while cursor\r\n if cursor.data < min\r\n min = cursor.data\r\n end\r\n cursor = cursor.next\r\n end\r\n return min\r\n end",
"def min_element\n self.to_a.min\n end",
"def smallest\n # self.compact.sort.first\n self.compact.min\n end",
"def t_min\n @temp.min\n end",
"def find_minimum\n current = @root\n\n while current.left && !current.nil?\n current = current.left\n return current.value if current.left.nil?\n end\n end",
"def min(value)\n opts[:min] = value\n end",
"def min\n\t\tm = self.valor[0]\n\t\tfor i in (0...self.valor.size.to_i)\n\t\t\t\tif (self.valor[i]< m)\n\t\t\t\t\tm = self.valor[i]\n\t\t\t\tend\n\t\tend\n\t\treturn m\n\tend",
"def visit_axiom_aggregate_minimum(minimum)\n # TODO: wrap this in a coalesce operation once the default can be made sane\n unary_prefix_operation_sql(MINIMUM, minimum)\n end",
"def find_min\n loc = find_min_locator and loc.value\n end",
"def select_youngest_bear_and_returns_name_and_age\n 'SELECT min(bears.name),(bears.age) FROM bears ORDER BY age ASC LIMIT 1'\nend",
"def min(field)\n determine(field, :<=)\n end",
"def minimum\n Minimum.new(self)\n end",
"def find_min(node)\n return node if node.left.nil?\n find_min(node.left)\n end",
"def min\n self.class.min\n end",
"def my_min(arr)\n smallest = nil\n\n arr.each do |n|\n smallest = n if smallest.nil? || n < smallest\n end\n\n smallest\nend",
"def min\n min = @m[0][0]\n for i in 0...fi\n for k in 0...co\n if @m[i][k] < min then\n min=@m[i][k]\n end\n end\n end \n min\n end",
"def min\n\n minimo = 0.to_f\n for i in 0...matriz.size \n if matriz[i] != nil \n matriz[i].each do |key, value|\n if matriz[i][key].to_f < minimo\n minimo = matriz[i][key].to_f\n end\n end\n end \n end\n minimo\n\tend",
"def min\n dates.first\n end",
"def minvalue\n MINVALUE\n end",
"def min\n\t\tm = self.mat[0][0]\n\t\tfor i in (0...@fil.to_i)\n\t\t\t#for j in (0...@col.to_i)\n\t\t\t@col.to_i.times do |j| \n\t\t\t\tif (self.mat[i][j] < m)\n\t\t\t\t\tm = self.mat[i][j]\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn m\n\tend",
"def min\r\n @range.min\r\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def argmin(input, axis = nil, name: nil, dimension: nil, output_type: :int32)\n _op(:argmin, input, nil, axis: axis, name: name, dimension: dimension, data_type: output_type)\n end",
"def min\n self.reduce('lambda{|memo, item| memo < item ? memo : item }')\n end",
"def get_first_value( sql, *bind_vars )\n execute( sql, *bind_vars ) { |row| return row[0] }\n nil\n end",
"def get_min()\n @array[-1][1]\n end",
"def min(*x, &block)\n return x.first if x.size == 1\n return min2(x[0], x[1], &block) if x.size == 2\n a = x.first\n (1...x.size).each { |b| \n a = min2(a,x[b], &block) }\n a\n end",
"def min_c_cell\n @min_c_cell ||= Matrices::Cell.new(c.min_index)\n end",
"def my_min2(array)\n lowest_val = Float::INFINITY\n array.each do |el|\n lowest_val = el if el < lowest_val\n end\n\n lowest_val\nend",
"def min()\n return stack.stack.last[:min]\n end",
"def min\n min = @valor[0][0]\n i = 0\n self.fil.times do |i|\n j = 0\n self.col.times do |j|\n if (@valor[i][j] < min)\n min = @valor[i][j]\n end\n j=j+1\n end\n i=i+1\n\tend\n min\n end",
"def find_min_value(array)\n answer = array.sort!\n answer[0]\nend",
"def get_min()\n # O(1) time\n @min.last\n end",
"def my_min(list)\n smallest_num = nil\n list.each do |num|\n if smallest_num == nil || smallest_num > num\n smallest_num = num\n end\n end\n smallest_num\nend",
"def min(key)\n key = find(key) if !key.is_a? Node\n return key if key.left.nil? && key.right.nil?\n min(key.left)\n end",
"def min_by_key(key, students)\n first_by_key(key, 'asc', students)\nend",
"def get_min(pq)\n\t\tmin_distance = nil\n\t\tmin_key = nil\n\t\tif (pq.size > 0)\n\t\t\tmin_distance = pq[pq.keys.first]\n\t\t\tmin_key = pq.keys.first\n\t\t\tpq.each do |k,v|\n\t\t\t\tif (v && (!min_distance || v <= min_distance))\n\t\t\t\t\tmin_distance = v\n\t\t\t\t\tmin_key = k\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn min_key\n\tend",
"def least\n nil\n end",
"def lowest_rating\n Show.minimum(:rating)\nend",
"def good_my_min(arr)\n smallest = arr.first\n arr.each do |el|\n smallest = el if el < smallest\n end\n smallest\nend",
"def autosizedMinimumOutdoorAirFlowRate\n\n result = OpenStudio::OptionalDouble.new()\n\n name = self.name.get.upcase\n \n model = self.model\n \n sql = model.sqlFile\n \n if sql.is_initialized\n sql = sql.get\n \n query = \"SELECT Value \n FROM tabulardatawithstrings \n WHERE ReportName='ComponentSizingSummary' \n AND ReportForString='Entire Facility' \n AND TableName='Controller:OutdoorAir' \n AND RowName='#{name}' \n AND ColumnName='Minimum Outdoor Air Flow Rate' \n AND Units='m3/s'\"\n \n val = sql.execAndReturnFirstDouble(query)\n \n if val.is_initialized\n result = OpenStudio::OptionalDouble.new(val.get)\n end\n \n end\n\n return result\n \n end",
"def min(list)\n list.min\nend",
"def min(cat)\n @categories[cat][:min]\n end",
"def min(a,b)\n a < b ? a : b\n end",
"def minimum=(value)\n @minimum = value\n end",
"def find_smallest_int(arr)\n return arr.min\nend"
] | [
"0.80278295",
"0.79727817",
"0.79599094",
"0.70214087",
"0.70161986",
"0.6862306",
"0.67386323",
"0.6722003",
"0.6711711",
"0.6696324",
"0.66915125",
"0.663722",
"0.6621367",
"0.6616968",
"0.6611135",
"0.65922457",
"0.65894884",
"0.6576366",
"0.65665114",
"0.6523344",
"0.65079993",
"0.6456523",
"0.6454274",
"0.6431403",
"0.6397178",
"0.63830405",
"0.63586855",
"0.6322651",
"0.6294151",
"0.6279967",
"0.62594956",
"0.62393296",
"0.62388986",
"0.62295526",
"0.6208904",
"0.6199498",
"0.6196145",
"0.6196145",
"0.6191901",
"0.61909604",
"0.61909604",
"0.6158465",
"0.6156822",
"0.6107739",
"0.61064875",
"0.61064875",
"0.6092126",
"0.6084365",
"0.6076848",
"0.60733455",
"0.6019777",
"0.60005486",
"0.59956527",
"0.59913117",
"0.5958945",
"0.5940913",
"0.5933493",
"0.59313303",
"0.5925936",
"0.5907352",
"0.5853073",
"0.5844475",
"0.5842229",
"0.5834207",
"0.5826301",
"0.5812342",
"0.5806212",
"0.5804252",
"0.5773364",
"0.57675326",
"0.5763941",
"0.57556546",
"0.574317",
"0.57382923",
"0.57382923",
"0.57382923",
"0.5726544",
"0.5693165",
"0.5684049",
"0.5682619",
"0.5678424",
"0.567657",
"0.566643",
"0.5632499",
"0.563064",
"0.56224227",
"0.56105477",
"0.5590304",
"0.5590111",
"0.55872583",
"0.5583722",
"0.55719936",
"0.5571724",
"0.5566085",
"0.555844",
"0.55501294",
"0.55435514",
"0.55416745",
"0.55412734",
"0.5533157"
] | 0.8268641 | 0 |
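A minimal usage sketch for the minimum calculation documented in the row above. It assumes an ActiveRecord model Person backed by a people table with an integer age column (as in the row's own example); the age scope and the team_id column are illustrative additions, not part of the original row.

# Sketch only: requires a configured ActiveRecord connection and a Person model.
Person.minimum(:age)                      # => 7; returns nil when the table has no rows
Person.where("age >= 18").minimum(:age)   # calculations respect the relation's current scope
Person.group(:team_id).minimum(:age)      # => { 1 => 7, 2 => 12 } one minimum per group (team_id is hypothetical)

The grouped form returns a hash keyed by the grouping expression, which is the usual reason to prefer the calculate-based methods over loading every record and calling Array#min in Ruby.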
Same as minimum but performs the query asynchronously and returns an ActiveRecord::Promise | def async_minimum(column_name)
async.minimum(column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_min()\r\n self.min\r\n end",
"def get_min()\n # O(1) time\n @min.last\n end",
"def lowest_student_gpa\n \"SELECT MIN(gpa) FROM students\"\nend",
"def oldest(constraints = {})\n constraints.merge!(order: :created_at.asc)\n _q = query(constraints)\n _q.define_singleton_method(:method_missing) { |m, *args, &block| self.results.send(m, *args, &block) }\n _q\n end",
"def get_min\n @min ||= calculate_min\n end",
"def first **args\n query( **( { order: \"@rid\" , limit: 1 }.merge args)).execute(reduce: true)\n\tend",
"def get_min()\n end",
"def min() end",
"def find_min\r\n return nil if !@head\r\n cursor = @head\r\n min = cursor.data\r\n while cursor\r\n if cursor.data < min\r\n min = cursor.data\r\n end\r\n cursor = cursor.next\r\n end\r\n return min\r\n end",
"def earliest(key, min)\n _request_id, time = @store.zrangebyscore(key, \"(#{min}\", '+inf', withscores: true, limit: [0, 1]).first\n time\n end",
"def min(field = nil)\n block_given? ? super() : aggregates(field)[\"min\"]\n end",
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def earliest!\n earliest_order!.first\n end",
"def find_min\n loc = find_min_locator and loc.value\n end",
"def my_min2 # O(n) time complexity\n smallest = self.first\n self.each do |num|\n sleep(1)\n smallest = num if num < smallest \n end\n smallest\n end",
"def best_ask\n @asks.min_by { |x| x.fetch(:price) }\n end",
"def async_lookup(args)\n unless (Hash === args)\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all do |rows|\n yield(rows.any? ? rows.first : nil)\n end\n\n return\n end",
"def earliest\n return self.transactions.first(:order => [:date.asc])\n end",
"def sync\n raise \".sync can only be used on the client\" if Volt.client?\n\n result = nil\n error = nil\n\n self.then do |val|\n result = val\n end.fail do |err|\n error = err\n end\n\n if error\n err_str = \"Exception in Promise at .sync: #{error.inspect}\"\n err_str += error.backtrace.join(\"\\n\")\n Volt.logger.error(err_str)\n fail error\n else\n return result\n end\n end",
"def async_maximum(column_name)\n async.maximum(column_name)\n end",
"def minimum(project_id, event_collection, target_property, options = {})\n options[:event_collection] = event_collection\n options[:target_property] = target_property\n\n resource \"projects/#{project_id}/queries/minimum\", options\n end",
"def demand(promise)\n if promise.respond_to? :__result__\n promise.__result__\n else\n promise\n end\n end",
"def get_min()\n @data.min\n end",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def minmax(run)\n db = SQLite3::Database.open \"rand_int.db\" #open Database\n results = db.get_first_row \"SELECT * FROM Random WHERE entry=#{run}\" #get only first row of result set\n rescue SQLite3::Exception => e \n \n puts \"Exception occured\"\n puts e\n \n ensure\n db.close if db\n\n return \"Min: #{results[2]}, Max: #{results[3]}\"\nend",
"def minimum(column_name)\n calculate(:minimum, column_name)\n end",
"def better_my_min\n min = self.first\n self.each do |el|\n min = el if el < min\n end\n min\n end",
"def get_min()\n @min \n end",
"def min\n @store.peek[:min] unless empty?\n end",
"def cheapest(products)\n products.min_by do |product|\n product[:price]\n end\nend",
"def min(column)\n _all_with_present_column(column).min\n end",
"def earliest\n earliest_order.first\n end",
"def fetch_and_enqueue(force_all_syncs=false)\n begin\n if force_all_syncs\n query = db.fetch(%Q(\n SELECT r.name, r.id FROM #{relation} r, users u WHERE\n (r.state = '#{CartoDB::Synchronization::Member::STATE_SUCCESS}'\n OR r.state = '#{CartoDB::Synchronization::Member::STATE_SYNCING}')\n AND u.id = user_id AND u.state = '#{Carto::User::STATE_ACTIVE}'\n ))\n else\n query = db.fetch(%Q(\n SELECT r.name, r.id, r.user_id FROM #{relation} r, users u\n WHERE EXTRACT(EPOCH FROM r.run_at) < #{Time.now.utc.to_f}\n AND u.id = user_id AND u.state = '#{Carto::User::STATE_ACTIVE}'\n AND\n (\n r.state = '#{CartoDB::Synchronization::Member::STATE_SUCCESS}'\n OR (r.state = '#{CartoDB::Synchronization::Member::STATE_FAILURE}'\n AND r.retried_times < #{CartoDB::Synchronization::Member::MAX_RETRIES})\n )\n ORDER BY ran_at\n ))\n end\n success = true\n rescue Exception => e\n success = false\n print_log(\"ERROR fetching sync tables: #{e.message}, #{e.backtrace}\", true)\n end\n\n if success\n print_log \"Fetched #{query.count} records\"\n force_all_syncs ? enqueue_all(query) : enqueue_rate_limited(query)\n end\n\n self\n end",
"def my_min_once\n min = first\n each do |num|\n if num < min\n min = num\n end\n end\n min\n end",
"def find_min_locator\n return nil if empty?\n use_min\n mode_call(:find_min_loc)\n end",
"def smallest_task(tasks)\n tasks.min_by { |task| task.arrival_time }\n end",
"def select_youngest_bear_and_returns_name_and_age\n 'SELECT min(bears.name),(bears.age) FROM bears ORDER BY age ASC LIMIT 1'\nend",
"def get_min()\n @min\n end",
"def min_record\n min = record(infimum.next)\n min if min != supremum\n end",
"def query(sql, *bind_values)\n @pool.acquire { |conn| conn.query(sql, *bind_values).first }\n end",
"def get_min\n @min\n end",
"def earliest; all(:order => [:started_at.asc, :id.asc ]) end",
"def first(n=1)\n query(@sql + ' LIMIT ' + n.to_s, cache: false)\n end",
"def index\n @posts = Post.joins(:post_detail).select('posts.*, post_details.*').where('post_details.price = (SELECT MIN(post_details.price) FROM post_details)').all\n end",
"def first(n=1)\n return values[0] if self.class == BaseRelation && loaded && n == 1\n result = limit(n).load\n result.length == 1 ? result[0] : result\n end",
"def min\n @v.compact.min\n rescue\n nil\n end",
"def min\n self.reduce('lambda{|memo, item| memo < item ? memo : item }')\n end",
"def first\n results.first\n end",
"def min()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Functions::Min::MinRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def first\n return sync { @first }\n end",
"def query_wait sql, waiting_time = 10\n result = db[sql].all\n if result.empty?\n if waiting_time != 0\n sleep 1\n result = query_wait(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def min(args)\n col = column field: args[:field]\n col.map! {|item| item = item.to_f} \n col.min\n end",
"def execute(input_set = nil)\n resp = super(input_set)\n results = ClosestResultSet.new(resp)\n return results\n end",
"def min; end",
"def min; end",
"def min\n end",
"def min\n end",
"def min\n to_a.min\n end",
"def min\n return @t_min\n end",
"def each_record\n return enum_for(:each_record) unless block_given?\n\n c = record_cursor(:min)\n\n while (rec = c.record)\n yield rec\n end\n\n nil\n end",
"def first_sync_entity(entity_name, organization, connec_client, external_client, last_synchronization_date, opts, external = true)\n limit = Settings.first_sync_batch_size || 50\n skip = 0\n entities_count = limit\n last_first_record = nil\n\n h = {__limit: limit}\n external ? h[:__skip_connec] = true : h[:__skip_external] = true\n entity_instance = instanciate_entity(entity_name, organization, connec_client, external_client, opts.merge(h))\n\n # IF entities_count > limit\n # This first sync feature is probably not implemented in the connector\n # because it fetched more than the expected number of entities\n # No need to fetch it a second Time\n # ELSIF entities_count < limit\n # No more entities to fetch\n while entities_count == limit\n entity_instance.opts_merge!(__skip: skip)\n\n perform_hash = perform_sync(entity_instance, last_synchronization_date, external)\n entities_count = perform_hash[:count]\n\n # Safety: if the connector does not implement batched calls but has exactly limit entities\n # There is a risk of infinite loop\n # We're comparing the first record to check that it is different\n first_record = Digest::MD5.hexdigest(perform_hash[:first].to_s)\n break if last_first_record && first_record == last_first_record\n\n last_first_record = first_record\n\n skip += limit\n end\n end",
"def query_single(sql, *params)\n results = run(sql, *params)\n results.each(as: :array, :first => true).first\n end",
"def get_min\n minimum = @store.pop\n until @store.empty?\n element = @store.pop\n if minimum > element\n minimum = element\n end\n end\n minimum\n end",
"def smallest\n # self.compact.sort.first\n self.compact.min\n end",
"def minimum\n Minimum.new(self)\n end",
"def find_min_value(array)\n return array.min\nend",
"def lowest_bid\n bids.lowest_bid\n end",
"def min(list)\n list.min\nend",
"def find(search_string)\n result = search(search_string)\n result.min\n end",
"def lowest_stock # A refactoriser via méthode SQL, on peut faire beaucoup plus court\n # Pour chaque produit récupérer la remaining quantity\n # Vérifier si cette valeur est inférieure à un critère donné\n # Renvoyer la liste de tous les produits concernés\n @products = Product.all\n @products = @products.sort_by do |product|\n product.total_remaining_quantity\n end\n low_stock_trigger = 3 # Plancher de quantité qui trigger l'alimentation de la liste des low_stocks\n low_stock_list = Array.new\n @products.each do |product|\n if product.total_remaining_quantity < low_stock_trigger\n low_stock_list << product\n end\n end\n return low_stock_list.first # Pour l'instant on ne renvoit qu'un item pour qu'on ait une seule notif pour stock bas\n end",
"def min_by_key(key, students)\n first_by_key(key, 'asc', students)\nend",
"def first(&block)\n args = limit(1).include_docs.query\n\n end",
"def my_min_fast(arr)\n smallest = arr[0]\n arr.each do |ele|\n if ele < smallest\n smallest = ele\n end\n end\n return smallest\nend",
"def min\n dates.first\n end",
"def result_for_query(query)\n results_for_query(query).first\n end",
"def min\n @range.begin\n end",
"def min(&block)\n flag = true # 1st element?\n result = nil\n self.each{|*val|\n val = val.__svalue\n if flag\n # 1st element\n result = val\n flag = false\n else\n if block\n result = val if block.call(val, result) < 0\n else\n result = val if (val <=> result) < 0\n end\n end\n }\n result\n end",
"def minimum_value\n @minimum_value || store.min\n end",
"def select_first!\n limit(1).select!.first\n end",
"def min(attr)\n column(attr).min\n end",
"def earliest_mpost\n earliest = nil\n block = Proc.new {|cluster|\n mpost = cluster.earliest_mpost\n if mpost \n earliest = mpost if (earliest == nil ||\n earliest.trigger_time > mpost.trigger_time)\n end\n }\n raw_clusters.each(&block)\n processed_clusters.each(&block)\n return earliest\n end",
"def min\n self.class.min\n end",
"def minimum(attribute_name, options = {})\n calculate(:min, attribute_name, options)\n end",
"def eqp_r_min(item)\n end",
"def query_empty sql, waiting_time = 10\n result = db[sql].all\n if !result.empty?\n if waiting_time != 0\n sleep 1\n result = query(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def get_min()\n # smallest = Float::INFINITY\n # @stack.each do |item|\n # if item < smallest\n # smallest = item\n # end\n # end\n # return smallest\n @stack.min\n end",
"def query(sql)\n if NB.neverblocking? && NB.reactor.running?\n send_query sql\n NB.wait(:read, IO.new(socket))\n get_result\n else\n super(sql)\n end\n end",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def least_duplicates\r\n smallest = 1000\r\n @schedule_datasets.each do |ds|\r\n smallest = ds.most_duplicates if ds.most_duplicates < smallest\r\n end\r\n return smallest\r\n end",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def min_bid(entity)\n return unless entity\n return starting_bid(entity) if @bids[entity].empty?\n\n high_bid = highest_bid(entity)\n (high_bid.price || entity.min_bid) + min_increment\n end",
"def pick(*column_names)\n if loaded? && all_attributes?(column_names)\n result = records.pick(*column_names)\n return @async ? Promise::Complete.new(result) : result\n end\n\n limit(1).pluck(*column_names).then(&:first)\n end",
"def my_min(array)\n\t\n\tarray.min\n\t\nend",
"def minimum\n return @minimum\n end",
"def min_point\n # generate the bounding box if not already done\n bounding_box\n # return the min\n @min\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def delay_query\n @time ||= Time.now\n delay = 0.5\n wait_time = delay - (Time.now - @time).to_f\n sleep(wait_time) unless wait_time <= 0\n x = yield\n @time = Time.now\n return x\n end",
"def smallest_number(num1, num2)\n result = [num1, num2].min\n return result\nend",
"def minimum\n object.minimum.to_f\n end"
] | [
"0.5744234",
"0.5733445",
"0.5683802",
"0.5457232",
"0.54498786",
"0.5439021",
"0.54343444",
"0.5344327",
"0.5337682",
"0.5315812",
"0.5305487",
"0.5303429",
"0.5288761",
"0.52610934",
"0.5255177",
"0.52475387",
"0.52416587",
"0.523488",
"0.5215397",
"0.5209032",
"0.51844305",
"0.51817775",
"0.518003",
"0.51675904",
"0.51665956",
"0.51410645",
"0.51355535",
"0.5134905",
"0.5124632",
"0.5115389",
"0.5111161",
"0.5108535",
"0.51053464",
"0.51038045",
"0.5081001",
"0.5079125",
"0.5060048",
"0.5057662",
"0.50390667",
"0.49701786",
"0.49694157",
"0.49687883",
"0.49647868",
"0.4952921",
"0.49468216",
"0.49427837",
"0.49312413",
"0.49076578",
"0.49041462",
"0.49040163",
"0.4903688",
"0.4899132",
"0.48744968",
"0.4860806",
"0.4860806",
"0.48549557",
"0.48549557",
"0.4854786",
"0.48543566",
"0.48459467",
"0.48367167",
"0.4830644",
"0.48217776",
"0.48181525",
"0.48156512",
"0.48138314",
"0.48130724",
"0.48069203",
"0.4803153",
"0.4802477",
"0.47979978",
"0.4793497",
"0.47860464",
"0.47798422",
"0.47710764",
"0.47703293",
"0.47643286",
"0.47616282",
"0.4748295",
"0.47390404",
"0.47356337",
"0.47344664",
"0.4714566",
"0.471181",
"0.4710163",
"0.4704923",
"0.46985424",
"0.46962437",
"0.4695751",
"0.46936306",
"0.46913645",
"0.46835062",
"0.4678644",
"0.46779034",
"0.46777824",
"0.46705633",
"0.46705633",
"0.4664894",
"0.4664841",
"0.4639903"
] | 0.7292647 | 0 |
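The async_minimum row above returns an ActiveRecord::Promise rather than a value. A hedged sketch of how that promise is typically consumed, assuming Rails 7.1+ with an async query executor configured via config.active_record.async_query_executor; the intervening work is a placeholder.

# Sketch only: async calculations need Rails 7.1+ and a configured async query executor.
promise = Person.async_minimum(:age)   # returns an ActiveRecord::Promise immediately
do_other_work                          # hypothetical placeholder while the query runs on the background pool
promise.value                          # blocks if needed, then returns the same result as Person.minimum(:age)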
Calculates the maximum value on a given column. The value is returned with the same data type of the column, or +nil+ if there's no row. See calculate for examples with options. Person.maximum(:age) => 93 | def maximum(column_name)
calculate(:maximum, column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def max(attr)\n column(attr).max\n end",
"def max(args)\n col = column field: args[:field]\n col.map! {|item| item = item.to_f} \n col.max\n end",
"def max(column)\n _all_with_present_column(column).max\n end",
"def max_value(table_name, column_name)\n result = @client.query(\"SELECT MAX(`#{column_name}`) FROM `#{table_name}`;\")\n\n result.first.values[0] || 0\n end",
"def column_max( table, column )\n\tmax = -9999999\n\t(0...(column + 2)).each do |x|\n\t\tif table[x][column] != nil\n\t\t\tif table[x][column] > max\n\t\t\t\tmax = table[x][column]\n\t\t\tend\n\t\tend\n\tend\n\n\treturn max\nend",
"def maximum\n object.maximum.to_f\n end",
"def max\n if valid?\n max_value\n end\n end",
"def biggest_column\n @cols.max_by { |col| col.rows.length }\n end",
"def max_column\n @columns&.max_by(&:size)\n end",
"def col_max() return 2 end",
"def max\n cells.compact.max\n end",
"def col_max() 2 end",
"def maximum (value = nil)\n\t\tif value\n\t\t\traise_if_error C.glyr_opt_number(to_native, value)\n\t\telse\n\t\t\tto_native[:number]\n\t\tend\n\tend",
"def max\n max = get(0,0)\n for i in 0...@filas\n for j in 0...@columnas\n if (get(i,j) > max)\n max = get(i,j)\n end\n end\n end\n max\n end",
"def max_value\n max = 0\n\n @matrix.each do |row|\n row_max = row.max\n max = row_max if row_max > max\n end\n\n max\n end",
"def find_max()\r\n self.max\r\n end",
"def maximum(attribute_name, options = {})\n calculate(:max, attribute_name, options)\n end",
"def max\n @data[0]\n end",
"def maximum_value\n @maximum_value || store.max\n end",
"def get_max\n @max ||= calculate_max\n end",
"def odb_max\n \"max(#{to_s})\"\n end",
"def max\n @v.compact.max\n rescue\n nil\n end",
"def max(field = nil)\n block_given? ? super() : aggregates(field)[\"max\"]\n end",
"def max\n to_a.max\n end",
"def largest_value\n values.max\n end",
"def max\n data.max\n end",
"def maxcol\n data.cols\n end",
"def max(field, opts={})\n opts = ::Hashie::Mash.new(opts)\n all(opts).inject(nil) do |max, item|\n val = item.send(field)\n max = val if !val.nil? && (max.nil? || val > max)\n max\n end\n end",
"def max\n return super if super.nil?\n (numeric_type == 'Integer') ? super.to_i : super.to_f\n end",
"def validation_max\n validation = validations? && validations.find do |validation|\n validation.kind == :numericality\n end\n if validation\n # We can't determine an appropriate value for :greater_than with a float/decimal column\n raise IndeterminableMaximumAttributeError if validation.options[:less_than] && column? && [:float, :decimal].include?(column.type)\n\n if validation.options[:less_than_or_equal_to]\n return (validation.options[:less_than_or_equal_to].call(object)) if validation.options[:less_than_or_equal_to].kind_of?(Proc)\n return (validation.options[:less_than_or_equal_to])\n end\n\n if validation.options[:less_than]\n return ((validation.options[:less_than].call(object)) - 1) if validation.options[:less_than].kind_of?(Proc)\n return (validation.options[:less_than] - 1)\n end\n end\n end",
"def max\n\t\t@max || nil\n\tend",
"def max return_type=:stored_type\n max_value = @vector.max\n if return_type == :vector\n Daru::Vector.new({index_of(max_value) => max_value}, name: @name, dtype: @dtype)\n else\n max_value\n end\n end",
"def max_allowed_value(table_name, column_name)\n definition = @client.query(\"DESC `#{table_name}` `#{column_name}`\").first\n max_allowed_value_per_type(definition[\"Type\"])\n end",
"def find_max_value (array)\n array.max # Add your solution here\nend",
"def get_max()\n end",
"def get_max\n @max\n end",
"def max; end",
"def max; end",
"def col_max; 1; end",
"def find_max_value(array)\n array.max \nend",
"def async_maximum(column_name)\n async.maximum(column_name)\n end",
"def max\n end",
"def max\n end",
"def maximum\n return self[0]\n end",
"def column_max_index( table, column )\n\tmax = -9999999\n\tindex = 0\n\t(0...(column + 2)).each do |x|\n\t\tif table[x][column] != nil\n\t\t\tif table[x][column] > max\n\t\t\t\tmax = table[x][column]\n\t\t\t\tindex = x\n\t\t\tend\n\t\tend\n\tend\n\treturn index\nend",
"def max\n only_with('max', 'NilClass', 'Numeric', 'String', 'DateTime')\n items.compact.max\n end",
"def max_value\n if @head.nil?\n return nil\n else\n if head.right\n max_value = max_search(head.right).data\n else\n max_value = head.data\n end\n end\n return max_value\n end",
"def custon_max(arr)\n return nil if arr.empty?\n\n max = arr[0]\n arr.each do |value|\n max = value if value > max\n end\n max\nend",
"def find_max_value(array)\n return array.max\nend",
"def maximum\n return @maximum\n end",
"def col_max\n return MAX_CATEGORY\n end",
"def max \n if max_element.kind_of? Array\n max_element.first\n else\n max_element\n end\n end",
"def max\n @max\n end",
"def max\n @max\n end",
"def max(field)\n grouped(:max, field.to_s, Javascript.max)\n end",
"def maximum(arr)\n\t# for empty input\n\treturn nil if arr.nil? or arr.length == 0\n\tmax = arr[0]\n\tarr.each do |elem|\n\t\tmax = elem if elem > max\n\tend\n\tmax\nend",
"def last_column\n column( maxcol )\n end",
"def max\n @raw.max_by(&:score)\n end",
"def col_max\n return 3\n end",
"def col_max\n return 3\n end",
"def bitfield_max column_name\n @@bitfields[column_name].values.sum.max\n end",
"def find_max_value(array)\n array.max { |item|\n return array.max\n }\n \nend",
"def custom_max(array)\n array.max\nend",
"def my_max(array)\n\t\n\tarray.max\n\nend",
"def max\n self.class.max\n end",
"def max(&block)\n flag = true # 1st element?\n result = nil\n self.each{|*val|\n val = val.__svalue\n if flag\n # 1st element\n result = val\n flag = false\n else\n if block\n result = val if block.call(val, result) > 0\n else\n result = val if (val <=> result) > 0\n end\n end\n }\n result\n end",
"def max\n \n # Establecemos valor del primer elemento\n max = @matrix[0][0]\n i = 0\n \n #Fila a fila actualizando el valor maximo\n while (i < @fil)\n j = 0\n while (j < @col)\n if (@matrix[i][j] > max)\n max = @matrix[i][j]\n end\n j += 1\n end\n i += 1\n end\n max\n end",
"def max\n empty? ? -Float::INFINITY : @list.head.value[2]\n end",
"def max\n @maxNumber\n end",
"def max\n self.reduce('lambda{|memo, item| memo > item ? memo : item }')\n end",
"def max\n\t\tm = self.valor[0]\n\t\tfor i in (0...self.valor.size.to_i)\n\t\t\t\tif (self.valor[i]> m)\n\t\t\t\t\tm = self.valor[i]\n\t\t\t\tend\n\t\tend\n\t\treturn m\n\tend",
"def visit_axiom_aggregate_maximum(maximum)\n # TODO: wrap this in a coalesce operation once the default can be made sane\n unary_prefix_operation_sql(MAXIMUM, maximum)\n end",
"def get_max(passed_values=nil)\n passed_values && passed_values.count > 0 ? passed_values.max : values.max\n end",
"def max\n maximo = 0.to_f\n for i in 0...matriz.size\n if matriz[i] != nil\n matriz[i].each do |key, value|\n if matriz[i][key].to_f > maximo\n maximo = matriz[i][key].to_f\n end\n end\n end\n end\n maximo\n end",
"def reduce_max(input, axis: nil, keepdims: false, dtype: nil)\n axis ||= reduction_dims(input, dtype: dtype)\n RawOps.max(input, axis, keep_dims: keepdims, typeT: dtype)\n end",
"def maxvalue\n MAXVALUE\n end",
"def get_max\n\t\t@max.last\n\tend",
"def max\n max = @valor[0][0]\n i=0\n\tself.fil.times do |i|\n\t j=0\n\t self.col.times do |j|\n\t if (@valor[i][j] > max)\n\t\t max= @valor[i][j]\n\t end\n\t j=j+1\n\t end\n\ti=i+1\n end\n max\n end",
"def max_element\n self.to_a.max\n end",
"def findmax(*values)\n \tvalues.max\n end",
"def find_max(some_array)\n max = nil\n some_array.each do |val|\n \tif max.nil?\n max=val\n elsif val>max\n max=val\n end\n end\n max\nend",
"def get_max_value()\n temp = @symtable.values\n temp.keep_if { |x| x.to_i < 16384 }\n temp.max\n end",
"def max(input_a, input_b, name: nil)\n check_allowed_types(input_a, NUMERIC_TYPES)\n check_allowed_types(input_b, NUMERIC_TYPES)\n input_a, input_b = check_data_types(input_a, input_b)\n _op(:max, input_a, input_b, name: name)\n end",
"def effective_maximum\n maximum_bound ? maximum_bound.value : Infinity\n end",
"def col_max\n return COLUMNS_MAX\n end",
"def max arr\n\tarr.max\nend",
"def maximum(tree_node = @root)\n return nil if tree_node.nil?\n return maximum(tree_node.right) if tree_node.right\n tree_node\n end",
"def max\n return -1 if @count == 0\n return @array[0]\n end",
"def max\n temp = @first\n maxValue = -999999\n while !temp.nil?\n if temp.value > maxValue\n maxValue = temp.value\n end\n temp = temp.next\n end\n maxValue\n end",
"def maximum(input_a, input_b, name: nil)\n check_allowed_types(input_a, NUMERIC_TYPES)\n check_allowed_types(input_b, NUMERIC_TYPES)\n input_a, input_b = check_data_types(input_a, input_b)\n max(input_a, input_b, name: name)\n end",
"def maximum(tree_node = @root)\n return nil unless tree_node\n unless tree_node.right\n tree_node\n else\n maximum(tree_node.right)\n end\n end",
"def find_max_value(array)\n sorted_array = array.sort\n sorted_array.last\nend",
"def max(*args)\n args.max\n end",
"def highest_value_square(values)\n @current_row = values.max_by{ |k,v| k }[1][0]\n @current_column = values.max_by{ |k,v| k }[1][1]\nend",
"def maximum(tree_node = @root)\n right = tree_node.right\n return tree_node if right.nil?\n right.right ? maximum(right) : right\n end",
"def max\n maximum = values.max\n select { |k, v| v == maximum }\n end",
"def max=\n end",
"def find_highest_value(hash)\n highest_value = hash.max_by {|driver, value| value}[0]\nend",
"def maximum(tree_node = @root)\n\t\t\t\t\treturn tree_node.right ? maximum(tree_node.right) : tree_node\n end",
"def find_max_value(array)\n array.sort!\n return array[-1]\nend"
] | [
"0.7988431",
"0.7961002",
"0.79240566",
"0.77619857",
"0.755031",
"0.69626755",
"0.68912166",
"0.68822396",
"0.6876943",
"0.6874555",
"0.6835841",
"0.681519",
"0.6812449",
"0.68052596",
"0.6793194",
"0.6715649",
"0.6711083",
"0.66858816",
"0.66575557",
"0.66409886",
"0.6638255",
"0.6595397",
"0.6593761",
"0.6562346",
"0.65613455",
"0.65568066",
"0.65565145",
"0.6554062",
"0.6547584",
"0.6532618",
"0.65321344",
"0.6505296",
"0.6473427",
"0.64629066",
"0.6450917",
"0.64277416",
"0.640046",
"0.640046",
"0.63982165",
"0.63964474",
"0.6356456",
"0.6353896",
"0.6353896",
"0.63468516",
"0.63448966",
"0.6335674",
"0.6335103",
"0.63172656",
"0.6311909",
"0.6293688",
"0.62678534",
"0.6264051",
"0.6204959",
"0.6204959",
"0.6204931",
"0.6204484",
"0.6185",
"0.6181272",
"0.6176648",
"0.6176648",
"0.6172357",
"0.6156926",
"0.6132357",
"0.61104417",
"0.6105431",
"0.6099003",
"0.6080945",
"0.6066533",
"0.6055219",
"0.60459906",
"0.60316926",
"0.6019763",
"0.60196316",
"0.6017549",
"0.60075796",
"0.600721",
"0.60067075",
"0.5999643",
"0.5984002",
"0.5964005",
"0.5962942",
"0.59427726",
"0.594199",
"0.59296054",
"0.59149945",
"0.59077686",
"0.5905258",
"0.58937716",
"0.5874127",
"0.58708584",
"0.5868152",
"0.5866545",
"0.58648735",
"0.5856461",
"0.5855785",
"0.58479047",
"0.58346504",
"0.5832714",
"0.5831769",
"0.5830284"
] | 0.80665714 | 0 |
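A companion sketch for the maximum calculation documented in the row above, under the same assumptions as the minimum example (a Person model with an integer age column; team_id is illustrative).

Person.maximum(:age)                       # => 93; nil when there are no rows
Person.group(:team_id).maximum(:age)       # => hash mapping each group key to its maximum
Person.where.not(age: nil).maximum(:age)   # NULL ages are ignored by SQL MAX anyway; shown only to make the scope explicit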
Same as maximum but performs the query asynchronously and returns an ActiveRecord::Promise | def async_maximum(column_name)
async.maximum(column_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def calculate_latest_submissions\n max_version_subquery = \"SELECT * FROM (SELECT MAX(version), course_user_datum_id\n FROM `submissions`\n WHERE assessment_id = #{id} AND ignored = FALSE\n GROUP BY course_user_datum_id) AS x\"\n Submission.select(\"submissions.*\").where(\"(version, course_user_datum_id) IN (#{max_version_subquery}) AND assessment_id = ?\", id)\n end",
"def find_max()\r\n self.max\r\n end",
"def last **args\n query( **( { order: {\"@rid\" => 'desc'} , limit: 1 }.merge args)).execute(reduce: true)\n\tend",
"def highest_student_gpa\n \"SELECT gpa from students ORDER BY gpa DESC LIMIT 1\"\nend",
"def get_max()\n end",
"def get_max\n @max ||= calculate_max\n end",
"def async_minimum(column_name)\n async.minimum(column_name)\n end",
"def minmax(run)\n db = SQLite3::Database.open \"rand_int.db\" #open Database\n results = db.get_first_row \"SELECT * FROM Random WHERE entry=#{run}\" #get only first row of result set\n rescue SQLite3::Exception => e \n \n puts \"Exception occured\"\n puts e\n \n ensure\n db.close if db\n\n return \"Min: #{results[2]}, Max: #{results[3]}\"\nend",
"def get_max\n\t\t@max.last\n\tend",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def highest_gdp\n # Which countries have a GDP greater than every country in Europe? (Give the\n # name only. Some countries may have NULL gdp values)\n execute(<<-SQL)\n SELECT\n name\n FROM\n countries\n WHERE\n gdp > (\n SELECT\n MAX(gdp)\n FROM\n countries\n WHERE\n continent = 'Europe'\n )\n SQL\nend",
"def maximum\n return self[0]\n end",
"def max_weight(db, lift)\n db.execute(\"SELECT date, lift, MAX(weight), reps FROM lifts WHERE lift = '#{lift}'\")\nend",
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def highest_gdp\n # Which countries have a GDP greater than every country in Europe? (Give the\n # name only. Some countries may have NULL gdp values)\n execute(<<-SQL)\n SELECT\n name\n FROM\n countries\n WHERE\n gdp > (\n SELECT\n MAX(gdp)\n FROM\n countries\n WHERE\n continent = 'Europe'\n GROUP BY\n continent\n );\n SQL\nend",
"def async_lookup(args)\n unless (Hash === args)\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all do |rows|\n yield(rows.any? ? rows.first : nil)\n end\n\n return\n end",
"def job_div_pop_max\n\t\tJobsCity.connection.select_all(\"SELECT MAX(CAST(#{self.numberjobs}*1000 AS float)/CAST(#{self.population} AS float)) FROM jobs_cities\")\n\tend",
"def big_three(db)\n max_weights = []\n max_weights.push(db.execute(\"SELECT MAX(weight) FROM lifts WHERE lift = 'squat'\"))\n max_weights.push(db.execute(\"SELECT MAX(weight) FROM lifts WHERE lift = 'bench press'\"))\n max_weights.push(db.execute(\"SELECT MAX(weight) FROM lifts WHERE lift = 'deadlift'\"))\n max_weights\nend",
"def find_post_highest_likes\n highest= self.posts.max do |like|\n like.likes\n end\n highest.title\nend",
"def newest(constraints = {})\n constraints.merge!(order: :created_at.desc)\n _q = query(constraints)\n _q.define_singleton_method(:method_missing) { |m, *args, &block| self.results.send(m, *args, &block) }\n _q\n end",
"def max_value(table_name, column_name)\n result = @client.query(\"SELECT MAX(`#{column_name}`) FROM `#{table_name}`;\")\n\n result.first.values[0] || 0\n end",
"def get_max\n @max\n end",
"def max(field = nil)\n block_given? ? super() : aggregates(field)[\"max\"]\n end",
"def last_submission\n Submission.find_by_sql(\" select * from submissions\n where exercise_id = #{exercise_id} AND\n user_id = #{user_id}\n order by created_at desc\n limit 1\").first\n end",
"def last_update\n request = <<-STRING\n SELECT max(updated_at) FROM\n (SELECT updated_at FROM schools WHERE id = #{id}\n UNION\n SELECT updated_at FROM teachers WHERE school_id = #{id}\n UNION\n SELECT updated_at FROM addresses WHERE school_id = #{id}\n UNION\n SELECT c.updated_at FROM courses c, addresses a WHERE a.school_id = #{id} AND c.address_id = a.id\n UNION\n SELECT p.updated_at FROM properties p, courses c, addresses a WHERE a.school_id = #{id} AND c.address_id = a.id AND p.course_id = c.id) as subquery\n STRING\n return ActiveRecord::Base.connection.execute(request).first[\"max\"]\n end",
"def max\n self.reduce('lambda{|memo, item| memo > item ? memo : item }')\n end",
"def maximum(project_id, event_collection, target_property, options = {})\n options[:event_collection] = event_collection\n options[:target_property] = target_property\n\n resource \"projects/#{project_id}/queries/maximum\", options\n end",
"def games_with_player(player_id, league_id, limit)\n database do |db|\n return db.execute('SELECT GameID From Game\n WHERE PlayerID = :player_id\n AND LeagueID = :league_id\n ORDER BY Timestamp DESC, GameID DESC\n LIMIT :limit',\n player_id, league_id, limit).flatten\n end\nend",
"def find_max_game( log_file)\n \n begin\n #db = check_db_lock( log_file )\n db = SQLite3::Database.open log_file\n db.execute \"PRAGMA journal_mode = WAL\"\n stm = db.prepare \"SELECT max(UniqueGameID) FROM Game LIMIT 1\" \n rs = stm.execute\n rs.each do |row|\n #puts \"row = #{row}\"\n #puts \"row[1] = #{row[0]}\"\n return row[0].to_i\n end \n rescue SQLite3::Exception => e \n \n puts \"Exception occurred in find_max_game\"\n puts e\n \n ensure\n stm.close if stm\n db.close if db\n end\n stm.close if stm\n db.close if db\nend",
"def query_wait sql, waiting_time = 10\n result = db[sql].all\n if result.empty?\n if waiting_time != 0\n sleep 1\n result = query_wait(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def find_max_id(db, table)\r\n\t# Get list of ids\r\n\tid_array = db.execute(\"SELECT id FROM #{table}\")\r\n\t# Loop through list, checking for the highest number id\r\n\tmax_id = 0\r\n\tid_array.each do |cur_id|\r\n\t\tif cur_id[\"id\"] > max_id\r\n\t\t\tmax_id = cur_id[\"id\"]\r\n\t\tend\r\n\tend\r\n\t# Return the highest number found\r\n\treturn max_id\r\nend",
"def find_max\r\n return nil if !@head\r\n cursor = @head\r\n max = cursor.data\r\n while cursor\r\n if cursor.data > max\r\n max = cursor.data\r\n end\r\n cursor = cursor.next\r\n end\r\n return max\r\n end",
"def get_max_results\n @max_results\n end",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def max\n end",
"def max\n end",
"def last\n result ? all.last : limit(1).descending.all.last\n end",
"def async_average(column_name)\n async.average(column_name)\n end",
"def max\n if valid?\n max_value\n end\n end",
"def max; end",
"def max; end",
"def max()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Functions::Max::MaxRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def max\n return -1 if @count == 0\n return @array[0]\n end",
"def select_youngest_bear_and_returns_name_and_age\n \"SELECT name, age FROM bears ORDER BY age ASC LIMIT 1\"\nend",
"def latest\n first_one(&:latest)\n end",
"def latest_submission!\n if (max_version = Submission.where(assessment_id: assessment_id,\n course_user_datum_id: course_user_datum_id,\n ignored: false).maximum(:version))\n Submission.find_by(version: max_version, assessment_id: assessment_id,\n course_user_datum_id: course_user_datum_id)\n end\n end",
"def maximize(range = 1..1_000_000)\n (range).lazy_map { |x| [yield(x), x] }.max[1]\nend",
"def winner(game_id, league_id)\n database do |db|\n # get max score\n winner = db.execute('SELECT PlayerID FROM Game\n WHERE GameID = :game_id\n AND LeagueID = :league_id\n AND Score = (\n SELECT MAX(Score) FROM Game\n WHERE GameID = :game_id\n AND LeagueID = :league_id\n GROUP BY GameID\n )',\n game_id, league_id).flatten\n\n winner = winner.first if winner.length == 1\n\n # return the winner(s)\n return winner\n end\nend",
"def index\n @tweets = Tweet.all\n @last_tweet = @tweets.max_by{ |x| x.id}\n\n #100.times {\n # @tweets = Tweet.find(:all, :conditions => [ \"id > ?\", \"#{@last_tweet.id}\" ]) \n # @last_tweet = @tweets.max_by{ |x| x.id}\n #}\n end",
"def max\n @data[0]\n end",
"def max\n @v.compact.max\n rescue\n nil\n end",
"def max\n temp = @first\n maxValue = -999999\n while !temp.nil?\n if temp.value > maxValue\n maxValue = temp.value\n end\n temp = temp.next\n end\n maxValue\n end",
"def first **args\n query( **( { order: \"@rid\" , limit: 1 }.merge args)).execute(reduce: true)\n\tend",
"def last_results\n $sql_multi ? $sql_results.last : $sql_results\nend",
"def autosizedMaximumLoopFlowRate\n\n result = OpenStudio::OptionalDouble.new()\n\n name = self.name.get.upcase\n \n model = self.model\n \n sql = model.sqlFile\n \n if sql.is_initialized\n sql = sql.get\n \n query = \"SELECT Value \n FROM tabulardatawithstrings \n WHERE ReportName='ComponentSizingSummary' \n AND ReportForString='Entire Facility' \n AND TableName='PlantLoop' \n AND RowName='#{name}' \n AND ColumnName='Maximum Loop Flow Rate' \n AND Units='m3/s'\"\n \n val = sql.execAndReturnFirstDouble(query)\n \n if val.is_initialized\n result = OpenStudio::OptionalDouble.new(val.get)\n end\n \n end\n\n return result\n \n end",
"def max(&block)\n flag = true # 1st element?\n result = nil\n self.each{|*val|\n val = val.__svalue\n if flag\n # 1st element\n result = val\n flag = false\n else\n if block\n result = val if block.call(val, result) > 0\n else\n result = val if (val <=> result) > 0\n end\n end\n }\n result\n end",
"def async_result()\n #This is a stub, used for indexing\n end",
"def select_youngest_bear_and_returns_name_and_age\n 'SELECT min(bears.name),(bears.age) FROM bears ORDER BY age ASC LIMIT 1'\nend",
"def value(timeout = nil)\n return @result if @result\n \n poll_for_result\n \n if notify && pool # listen for task completion notification if the task is set up to do that\n listen_for_result_notification(timeout)\n elsif result_uri.nil? # wait for a result if we don't already have one\n wait_for_result(timeout)\n end\n \n @result\n end",
"def selects_oldest_bear_and_returns_name_and_age\n \"SELECT name, age\n FROM bears\n ORDER BY age DESC \n LIMIT 1\"\nend",
"def max\n dates.last \n end",
"def first(n=1)\n return values[0] if self.class == BaseRelation && loaded && n == 1\n result = limit(n).load\n result.length == 1 ? result[0] : result\n end",
"def last(limit=1)\n limit(limit).reverse_order.load.first\n end",
"def max(items)\n#(was not sure if instructions meant the method.max. this is my solution only restricting the method: max)\n max = items.sort.last\nend",
"def max\n @max\n end",
"def max\n @max\n end",
"def future(qualifier=nil)\n return @results[2,4] if qualifier.nil?\n end",
"def largest_value\n values.max\n end",
"def year_with_most_guests\n sql = <<-SQL\n SELECT year FROM guests GROUP BY year\n ORDER BY count(*) DESC LIMIT 1;\n SQL\n DB[:conn].execute(sql)[0][0]\nend",
"def process(now = Time.now, max_time=nil)\n t = collect { |d| d.process(now) }.compact.min\n t = max_time if max_time && t > max_time\n t\n end",
"def max\r\n temp = @first\r\n maxValue = -999999\r\n while !temp.nil?\r\n if temp.value > maxValue\r\n maxValue = temp.value\r\n end\r\n temp = temp.next\r\n end\r\n maxValue\r\n end",
"def latest_pipeline(pipelines)\n pipelines.max_by{|p| p.id }\nend",
"def max_by(array)\n return nil if array.empty?\n result = []\n array.each { |num| result << yield(num) }\n array.fetch(result.index(result.max))\nend",
"def find_max_value(array)\n array.max { |item|\n return array.max\n }\n \nend",
"def value\n if @dataset.queryable_source? && @entso_query.present?\n return @dataset.execute_query(@entso_query)\n end\n\n latest ? latest.value : default\n end",
"def maximum\n object.maximum.to_f\n end",
"def result_for_query(query)\n results_for_query(query).first\n end",
"def maximum_value\n @maximum_value || store.max\n end",
"def last(*args)\n last_arg = args.last\n\n limit = args.first if args.first.kind_of?(Integer)\n with_query = last_arg.respond_to?(:merge) && !last_arg.blank?\n\n query = with_query ? last_arg : {}\n query = scoped_query(query.merge(:limit => limit || 1)).reverse\n\n # tell the Query to prepend each result from the adapter\n query.update(:add_reversed => !query.add_reversed?)\n\n if !with_query && (loaded? || lazy_possible?(tail, limit || 1))\n if limit\n new_collection(query, super(limit))\n else\n super()\n end\n else\n if limit\n all(query)\n else\n relate_resource(query.repository.read_one(query))\n end\n end\n end",
"def max\n temp = @first\n maxValue = -999999\n while !temp.nil?\n if temp.value > maxValue\n maxValue = temp.value\n end\n temp = temp.next\n end\n return maxValue\n end",
"def find_max_value (array)\n array.max # Add your solution here\nend",
"def max\r\n\ttemp = @first\r\n\tmaxValue = nil\r\n\tif !temp.nil? then\r\n\t\tmaxValue = temp.value\r\n\t\ttemp = temp.next\r\n\tend\r\n\twhile !temp.nil?\r\n\t\tif temp.value > maxValue then\r\n\t\t\tmaxValue = temp.value\r\n\t\tend\r\n\t\ttemp = temp.next\r\n\tend\r\n\treturn maxValue\r\n end",
"def max_record\n # Since the records are only singly-linked in the forward direction, in\n # order to do find the last record, we must create a cursor and walk\n # backwards one step.\n max_cursor = record_cursor(supremum.offset, :backward)\n raise \"Could not position cursor\" unless max_cursor\n\n # Note the deliberate use of prev_record rather than record; we want\n # to skip over supremum itself.\n max = max_cursor.prev_record\n max if max != infimum\n end",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def query(sql, *bind_values)\n @pool.acquire { |conn| conn.query(sql, *bind_values).first }\n end",
"def best_bid\n @bids.max_by { |x| x.fetch(:price) }\n end",
"def result\n results.first\n end",
"def top_bid\n Bid.where('ad_id = ? ', self.id).maximum(:highest)\n end",
"def max\n @max ||= begin\n maxes = []\n options[:max].times { |t| maxes << dup_for(max: nil, change: nil, series: nil, extend_cache_life: options[:max] - t, period: period.prev(t + 1)).compute }\n maxes.max\n end if options[:max]\n end",
"def fetch(name, opts = {})\n return [] unless key = get_key(name)\n\n # TODO API inconsistent return format again:\n return last(name) if opts[:single] == 'last'\n return previous(name) if opts[:single] == 'previous'\n\n return fetch_timespans(key, opts) if opts[:as] == 'timespans'\n return fetch_bool(key, opts) if key[:type] == 'boolean'\n\n\n # below only numeric type\n scope = @db[:numeric_data_points].where(key_id: key[:id])\n scope = scope.where('time > :since', since: opts[:since]) if opts[:since]\n scope = scope.where('time < :until', until: opts[:until]) if opts[:until]\n\n if opts[:step]\n s = step opts[:step] # see Engine::Base#step\n # TODO use date_trunc for pg (main use case)\n if @db.adapter_scheme == :postgres\n f_time = \"floor((extract(epoch from time::timestamp with time zone)/#{s[:span]}))*#{s[:span]}\"\n scope = scope.select(Sequel.lit \"to_timestamp(#{f_time}) AS time\").group(Sequel.lit f_time).select_append { avg(value_avg).as(value_avg) }\n else # sqlite\n f_time = \"(strftime('%s',time)/#{s[:span]})*#{s[:span]}\"\n scope = scope.select(Sequel.lit \"datetime(#{f_time}, 'unixepoch') AS time\").group(Sequel.lit f_time).select_append { avg(value_avg).as(value_avg) }\n end\n end\n\n scope = scope.order(Sequel.desc(:time)).limit(100) # TODO limit option and order, these are defaults for testing\n #puts \"SQL: #{scope.sql}\"\n rows = scope.all\n rows.reverse.map do |r|\n [time_wrap(r[:time]), r[:value_avg]]\n end\n end",
"def find_max_locator\n return nil if empty?\n use_max\n mode_call(:find_max_loc)\n end",
"def currval(seq)\n $new.select_one(\"SELECT CASE WHEN is_called THEN last_value ELSE last_value-increment_by END from #{seq}\")[0]\nend",
"def biggest\n # self.compact.sort.last\n self.compact.max\n end",
"def biggest_stars\nMovieDatabase.execute(<<-SQL)\n SELECT\n a.name, COUNT(DISTINCT(m.id)) AS count\n FROM\n actor a\n JOIN\n casting c ON a.id = c.actorid\n JOIN \n movie m ON c.movieid = m.id\n WHERE\n c.ord = 1\n GROUP BY\n a.name\n HAVING\n COUNT(DISTINCT(m.id)) >= 30\n ORDER BY\n a.name;\nSQL\nend",
"def async_count(column_name = nil)\n async.count(column_name)\n end",
"def max\r\n temp = @first\r\n maxValue = -99999\r\n\r\n while !temp.nil?\r\n if temp.value > maxValue then\r\n maxValue = temp.value \r\n end \r\n temp = temp.next\r\n end \r\n maxValue\r\nend",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def lock_down_maximum\n m = Arturo::Feature.maximum(:updated_at)\n allow(Arturo::Feature).to receive(:maximum).and_return(m)\n end",
"def maximum_concurrency\n self.class.maximum_concurrency self\n end",
"def highest_bid\n\t self.bids.order(amount: :desc).first\n\t end"
] | [
"0.5775883",
"0.5580071",
"0.55321914",
"0.5442837",
"0.5436593",
"0.53789496",
"0.53772753",
"0.5294841",
"0.5290199",
"0.5259959",
"0.5196192",
"0.51851493",
"0.51657784",
"0.516499",
"0.5159016",
"0.51292676",
"0.5123591",
"0.51186675",
"0.51104236",
"0.5088175",
"0.5071647",
"0.506991",
"0.504581",
"0.50246614",
"0.50163823",
"0.5013504",
"0.5009012",
"0.49771276",
"0.4973384",
"0.495869",
"0.4939759",
"0.49248406",
"0.49245834",
"0.49218434",
"0.49147877",
"0.49147877",
"0.4906229",
"0.48992318",
"0.48983",
"0.48852244",
"0.48852244",
"0.48699138",
"0.48672053",
"0.4860102",
"0.48548955",
"0.4848457",
"0.4843235",
"0.48386407",
"0.4834608",
"0.4833507",
"0.48329616",
"0.48285422",
"0.48232946",
"0.48141086",
"0.48135686",
"0.48045135",
"0.4800079",
"0.47974962",
"0.47961056",
"0.47906396",
"0.47894564",
"0.4781884",
"0.47814986",
"0.47762653",
"0.47727844",
"0.47727844",
"0.47680756",
"0.4766901",
"0.4762105",
"0.47615108",
"0.4754614",
"0.47368348",
"0.4736676",
"0.47364998",
"0.4733662",
"0.47330493",
"0.47322002",
"0.47158763",
"0.4714235",
"0.47130966",
"0.47111663",
"0.47035518",
"0.47025043",
"0.4698669",
"0.4696851",
"0.469255",
"0.46833456",
"0.46823016",
"0.4678815",
"0.4675416",
"0.46673888",
"0.46617585",
"0.4657962",
"0.4657595",
"0.46572888",
"0.46563035",
"0.46516255",
"0.4649819",
"0.4649444",
"0.4643688"
] | 0.7333761 | 0 |
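Because async_maximum (documented above) and async_minimum both return promises, several calculations can be started before any of them is awaited. A minimal sketch under the same Person/age assumptions as the earlier examples:

promises = [Person.async_minimum(:age), Person.async_maximum(:age)]  # both queries start immediately
low, high = promises.map(&:value)   # each value call resolves its ActiveRecord::Promise
# low == 7, high == 93 for the sample data quoted in the documentation rows above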
Calculates the sum of values on a given column. The value is returned with the same data type of the column, +0+ if there's no row. See calculate for examples with options. Person.sum(:age) => 4562 | def sum(initial_value_or_column = 0, &block)
if block_given?
map(&block).sum(initial_value_or_column)
else
calculate(:sum, initial_value_or_column)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sum(column)\n result = all\n\n if result.any?\n result.inject(0.0) do |acc, record|\n if value = record.public_send(column)\n acc += value\n end\n\n acc\n end\n end\n end",
"def sum(attr)\n col = column(attr)\n if col.empty?\n 0\n else\n col.reduce(&:+)\n end\n end",
"def sum_column(column_name)\n @result.inject(0.0) { |sum, item_hash|\n if item_hash.has_key?(column_name)\n sum + item_hash[column_name].to_f\n else\n sum\n end\n }\n end",
"def col_sum(col_num)\r\n\t\tsum = 0\r\n\t\tself.get_column(col_num).each do |x|\r\n\t\t\tsum += x\r\n\t\tend\r\n\t\tsum\r\n\tend",
"def total_sum\n rows.collect(&:sum_with_vat).sum\n end",
"def sum(column_name)\n \traise ActiveRecord::StatementInvalid, \"Missing column name entered!\" if column_name.is_a? Integer\n \tbegin\n \t\tself.catalog_analytics.sum(column_name)\n \trescue ActiveRecord::StatementInvalid\n \t\traise ActiveRecord::StatementInvalid, \"Missing column name entered!\"\n \tend\t\n end",
"def column_sum(input_query, key)\n input_query.sum(key)\n end",
"def sum(attr)\n if empty?\n SpookAndPuff::Money.new('0')\n else\n map(&attr).sum\n end\n end",
"def total(rows)\n rows.map { |row| row[\"Money we made\"].to_i }.reduce(:+)\nend",
"def rowsum a,row\n sum=0\n 0.upto(4){|i| sum+=a[row][i]}\n return sum.to_f\nend",
"def sum(field = nil)\n block_given? ? super() : aggregates(field)[\"sum\"] || 0\n end",
"def find_sum\n self.inject(0) { |sum, x| sum + x.to_f }\n end",
"def record_sum(session, record_type, database)\n session.send(database).select_one( \\\n \"select sum(number1) + sum(number2) + sum(number3) + sum(number4) as sum\n from big_scan where diff_type = '#{record_type}'\")['sum'].to_f\n end",
"def sum\n reduce(0, &:+)\n end",
"def sum(attribute_name, options = {})\n calculate(:sum, attribute_name, options)\n end",
"def odb_sum\n \"sum(#{to_s})\"\n end",
"def amount_sum_for(meth)\n entries.select(&meth).map{ |entry| entry.amount.to_i }.compact.inject(&:+) || 0\n end",
"def total_amount(rows)\n rows.each.map { |row|\n row.amount.round(2)\n }.reduce(0, :+)\n end",
"def total_value\r\n return 0 if self.value.nil?\r\n self.value + self.expenses.includes(:expense).map{ |e|\r\n (e.quantity || 0) * (e.expense.signed_price || 0)\r\n }.sum\r\n end",
"def visit_axiom_aggregate_sum(sum)\n aggregate_function_sql(SUM, sum)\n end",
"def sum\n\t\treturn self.reduce(:+)\n\tend",
"def sum(field, opts={})\n opts = ::Hashie::Mash.new(opts)\n all(opts).inject(0){|sum, item| (item.send(field) || 0) + sum }\n end",
"def sum(heading)\n sum = 0\n @rows.each {|row| sum += (row.data[index(heading)].blank? ? 0.0 : row.data[index(heading)])}\n sum\n end",
"def calculate_sum\n 1000000000000\n end",
"def sum(p = nil)\n output = self.compact\n if p.nil?\n return output.inject(0){ |memo,n| memo + n.to_f }\n else\n return output.inject(0){ |memo,n| memo + n.send(p).to_f }\n end\n end",
"def total\n Float(@values.values.reduce(:+))\n end",
"def sum\n transactions.sum(:amount)\n end",
"def sum\n transactions.sum(:amount)\n end",
"def sum; end",
"def sum\n self.reduce('lambda{|sum, item| sum + item}')\n end",
"def house_total(house)\n return house.inject(0) {|sum, cell| sum + cell.to_i }\n end",
"def column_sum( day_number )\n sum = 0.0\n\n # [TODO] Slow. Surely there's a better way...?\n\n self.timesheet_rows.all.each do | timesheet_row |\n work_packet = WorkPacket.find_by_timesheet_row_id(\n timesheet_row.id,\n :conditions => { :day_number => day_number }\n )\n\n sum += work_packet.worked_hours if work_packet\n end\n\n return sum\n end",
"def sum\n self.inject(:+)\n end",
"def total\n sum(:total)\n end",
"def amount(col_name)\r\n amount = send(col_name).to_f unless (send(col_name).blank? || send(col_name) == 0.00)\r\n if amount\r\n amount = ((amount == amount.truncate) ? amount.truncate : amount)\r\n amount\r\n else\r\n 0\r\n end\r\n end",
"def sum_field_where(sum_field, where_field, where_value, where_relationship)\n result = run_sql(\"SELECT SUM(#{sum_field}) FROM #{table_name} WHERE #{where_field} #{where_relationship} #{add_quotes_if_string(where_value)};\")\n if result.is_a? Array\n result.first[0]\n else\n result\n end\n end",
"def sum arr\n # YOUR CODE HERE\n total = arr.sum\n \n return total\nend",
"def sum_values\n points.sum\n end",
"def element_sum\n\t\tif !self.numeric?\n\t\t\treturn false\n\t\tend\n\t\treturn self.inject(0) {|memo,n| memo + n}\t\n\tend",
"def sum arr\n sum = arr.sum\n return sum\nend",
"def sum_expense(expenses)\n sum = expenses.sum\n return sum\nend",
"def sum arr\n\ttotal = 0\n\tif arr.size > 0\n\t\ttotal = arr.reduce(:+)\n\tend\n\n\treturn total\nend",
"def sum(field)\n sum = documents.inject(nil) do |memo, doc|\n value = doc.send(field)\n memo ? memo += value : value\n end\n end",
"def sum(field)\n grouped(:sum, field.to_s, Javascript.sum)\n end",
"def async_sum(identity_or_column = nil)\n async.sum(identity_or_column)\n end",
"def subtotal\n #SQL version of summing which is much faster than Ruby\n line_items.select(\"SUM(quantity * price) AS sum\")[0].sum\n # Ruby version of sum\n # line_items.to_a.sum {|item| item.total}\nend",
"def on_call_sum(context, expression)\n nodes = process(expression, context)\n sum = 0.0\n\n unless nodes.is_a?(XML::NodeSet)\n raise TypeError, 'sum() can only operate on NodeSet instances'\n end\n\n nodes.each do |node|\n sum += node.text.to_f\n end\n\n return sum\n end",
"def sum(options = {})\n if options[:values]\n case options[:values]\n when :positive, :non_negative\n @v.sum_positive\n when :negative, :non_positive\n @v.sum_negative\n when :zero\n 0.0\n when :all, :non_zero\n @v.sum_all\n else\n raise ArgumentError, \"Option not recognized\"\n end\n else\n @v.sum_all\n end\n end",
"def sum\n only_with('sum', 'Numeric', 'String')\n items.compact.sum\n end",
"def sum(num, total) => total += num",
"def sumif( find_header, sum_header )\n return to_enum( :sumif ) unless block_given?\n find_col, sum_col = ch( find_header ), ch( sum_header )\n find_col.each_cell.inject(0) { |sum,ce| yield( ce.value ) && ce.row > header_rows ? sum + sum_col[ ce.row ] : sum }\n end",
"def sum(anArray)\n if anArray.empty? then\n return 0.0\n else\n total = 0\n anArray.each {|i| total = total + i}\n return total\n end\nend",
"def sum col_name,field2sum,*fields, &block\n key = key_fields [field2sum] + fields\n @stats[key] = [] unless @stats[key]\n @stats[key] << SumField.new(col_name,block)\n end",
"def sum(options = {})\n if options[:values]\n opt = options[:values].to_sym\n case opt\n when :positive, :non_negative\n @v.sum_positive\n when :negative, :non_positive\n @v.sum_negative\n when :zero\n 0.0\n when :all, :non_zero\n @v.sum_all\n else\n raise ArgumentError, \"Option '#{opt}' not recognized\"\n end\n else\n @v.sum_all\n end\n end",
"def total(variable)\n\treturn variable.inject{|sum, x| sum + x}\nend",
"def sum (tableau)\n chiffre = 0\n tableau.each do |element|\n chiffre = chiffre + element\n end\n return chiffre\nend",
"def sum\n flatten.compact.inject(:+)\n end",
"def sum arr\n if !arr.empty?\n soma = 0\n arr.each do |elt| ; soma += elt ; end\n return soma\n end\n return 0 \nend",
"def sum arr\n arr.reduce(0, :+)\nend",
"def sum arr\n arr.reduce(0, :+)\nend",
"def sum_by(attribute)\n @legs.pluck(attribute).select{|i| i.is_a?(Numeric)}.reduce(&:+)\n end",
"def total_expenses\n expenses.sum(:amount) || 0.0\n end",
"def sum\n\t\tprintf(\"%02i.%i\\n\", @options[:monat], @options[:jahr])\n\t\t@db.execute(\"select summe, gemeinsam from sum_#{@options[:name]} where jahr = #{@options[:jahr]} and monat = #{@options[:monat]} \") do |row|\n\t\t\tprintf(\"(%s) % .2f EUR \\n\", row[1], row[0])\n\t\tend\n\tend",
"def sum(arr)\n arr.reduce {|a,b| a+b}\n #correction : arr.reduce(:+,0) marche aussi\nend",
"def sum\n inject(0) { |acc, i| acc + i }\n end",
"def sum arr\n #return 0 if arr.empty?\n # arr.inject(:+)\n arr.inject(0, :+)\nend",
"def abs_sum()\n self.abs().sum()\n end",
"def sum arr\n if arr.length==0\n return 0\n end\n s = arr.reduce(:+)\n return s\nend",
"def sum(sums)\r\n if sums.empty?\r\n 0\r\n else\r\n sums.reduce(:+)\r\n end\r\nend",
"def sum n\n\tbegin\n\tn.reduce(:+)\n\trescue \n\t\tn.map!{|x| x.is_a?(Array) ? x.reduce(:+) : x }\n\t\tsum n\n\tend\n\tp n.reduce(:+)\nend",
"def sum arr\n return 0 if arr.empty?\n arr.inject(:+)\nend",
"def total(arr)\n x = arr.reduce(:+)\n return x\nend",
"def total(x)\n sum = 0\n\tx.each do |i|\n\t\tsum += i\n\tend\n\treturn sum\nend",
"def sum(array)\n\t\tsum = 0 #initialise 0 \n\t\tif array.empty? # si tableau vide = rien \n\t\treturn 0 # renvoie 0\n\t\telse # sinon\n\t\tarray.each{|x| sum = sum + x} # tous les elements du tableaux / incremente le code sum\n \t\treturn sum\n\t\tend\n\tend",
"def calculate_score\n @results.collect { |mir| mir.send(@column.to_sym) }.sum\n end",
"def sum arr\n # return arr.empty? ? 0 : arr.inject(:+)\n arr.inject(0, :+)\nend",
"def total(my_array)\n my_array.sum\n end",
"def sum(arr)\n arr.reduce(0, :+)\nend",
"def sum(arr)\n return 0 if arr.empty?\n arr.reduce(:+)\nend",
"def summing_method\n @summed_array = @single_digit_array.sum\n end",
"def total ( numbers )\r\n\r\n\treturn numbers.reduce(:+);\r\n\r\nend",
"def total(a)\n\ta.reduce(:+)\nend",
"def sum(array)\n\tanswer = 0\n\tif array.length > 0 then\n\t\tarray.each {|x| answer += x}\n\telse\n\t\treturn 0\n\tend\n\treturn answer\nend",
"def sum(element)\n element.reduce(:+)\nend",
"def expense_total\n self.expenses.sum(:amount).to_f\n end",
"def sum arr\n # YOUR CODE HERE\n lSum = 0\n arr.each {|x| lSum += x}\n return lSum\nend",
"def total(array)\n\tsum = array.inject(0, :+)\nend",
"def sum(array)\n array.map(&:to_i).reduce(0, :+)\nend",
"def total(my_array)\n result=0\n my_array.each { |a| result= result + a }\n return result\n end",
"def total\n votes.sum(:value)\n end",
"def total(an_array)\n an_array.reduce(:+)\nend",
"def sum(array)\n return 0 if array.empty?\n array.inject(:+)\nend",
"def point_sum(point_kind)\n mark = Mark.find_by_user_id_and_point_kind_id(self.id, point_kind.id)\n mark == nil ? 0 : mark.point_sum\n end",
"def column_total=(total)\n @sum = total % 10\n @carry = total / 10\n end",
"def total (array)\n\tsum = 0\n\n\tarray.each {|x| sum = sum + x}\n\treturn sum\nend",
"def total_expenses\n self.expenses.sum(\"amount\")\n end",
"def sum_values(arr)\n arr.inject(:+)\n end",
"def sum arr\n result = 0\n #Iterate through the length of the array to find the sum of the array elements\n if arr.length > 0 then\n arr.each do |index|\n\tresult += index\n end\n end\n return result\nend",
"def sum(array)\n array.reduce(0, :+)\n=begin\n res =0\n array.each { |a| res += a }\n res.to_i\n=end\nend",
"def sum arr\n res = 0\n arr.each {|i| res+=i}\n return res\nend"
] | [
"0.82282233",
"0.7980951",
"0.7726712",
"0.73051023",
"0.70191306",
"0.7005354",
"0.69311947",
"0.67743874",
"0.67564225",
"0.6572698",
"0.6436665",
"0.6418924",
"0.63971466",
"0.6380664",
"0.6364009",
"0.63602984",
"0.63245946",
"0.6299268",
"0.62766486",
"0.6274544",
"0.61948955",
"0.615559",
"0.6136676",
"0.61050206",
"0.609996",
"0.6099938",
"0.6035436",
"0.6035436",
"0.6029764",
"0.60181546",
"0.6006854",
"0.60034174",
"0.5998632",
"0.59957075",
"0.5992609",
"0.5989625",
"0.59559816",
"0.5952222",
"0.5927855",
"0.59207064",
"0.59054273",
"0.5897501",
"0.58894706",
"0.5886133",
"0.5871395",
"0.5839004",
"0.58083075",
"0.5791766",
"0.5777752",
"0.5755158",
"0.5749359",
"0.5747312",
"0.57425815",
"0.5740771",
"0.5736627",
"0.57319474",
"0.5720311",
"0.5714201",
"0.5701116",
"0.56997114",
"0.56974083",
"0.56899863",
"0.56871706",
"0.56847006",
"0.5684199",
"0.5675524",
"0.56711006",
"0.56700367",
"0.56657684",
"0.56625",
"0.565888",
"0.56452906",
"0.56365347",
"0.56328756",
"0.56306624",
"0.56288487",
"0.56098926",
"0.56019574",
"0.56005293",
"0.5596854",
"0.5595487",
"0.5590071",
"0.55856234",
"0.55852324",
"0.5578578",
"0.55776334",
"0.5562487",
"0.554435",
"0.5542713",
"0.55322164",
"0.5524471",
"0.5523436",
"0.55214953",
"0.55179095",
"0.55130434",
"0.5501848",
"0.5499849",
"0.5496124",
"0.54933745",
"0.5492975"
] | 0.71741664 | 4 |
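Illustrative aside (not part of the dataset rows): the sum entry above dispatches between a block form, handled by Enumerable#sum, and a column form delegated to calculate. The plain-Ruby sketch below mimics that dispatch under stated assumptions — Record, Relation, and the people sample are hypothetical stand-ins, and the calculate stub only approximates the SQL SUM aggregate a real relation would run.

# Hypothetical stand-ins; not ActiveRecord classes.
Record = Struct.new(:age)

class Relation
  include Enumerable

  def initialize(records)
    @records = records
  end

  def each(&block)
    @records.each(&block)
  end

  # Mirrors the block-vs-column dispatch shown in the record above.
  def sum(initial_value_or_column = 0, &block)
    if block_given?
      map(&block).sum(initial_value_or_column) # block form: Enumerable#sum
    else
      calculate(:sum, initial_value_or_column) # column form: aggregate over the column
    end
  end

  private

  # Stand-in for the SQL aggregate (SUM(column)) a real relation would execute.
  def calculate(_operation, column)
    @records.sum { |record| record.public_send(column) || 0 }
  end
end

people = Relation.new([Record.new(30), Record.new(12)])
p people.sum(:age)                       # => 42
p people.sum { |person| person.age * 2 } # => 84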
Same as sum but performs the query asynchronously and returns an ActiveRecord::Promise | def async_sum(identity_or_column = nil)
async.sum(identity_or_column)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def record_sum(session, record_type, database)\n session.send(database).select_one( \\\n \"select sum(number1) + sum(number2) + sum(number3) + sum(number4) as sum\n from big_scan where diff_type = '#{record_type}'\")['sum'].to_f\n end",
"def calculate(operation, column_name)\n operation = operation.to_s.downcase\n\n if @none\n case operation\n when \"count\", \"sum\"\n result = group_values.any? ? Hash.new : 0\n return @async ? Promise::Complete.new(result) : result\n when \"average\", \"minimum\", \"maximum\"\n result = group_values.any? ? Hash.new : nil\n return @async ? Promise::Complete.new(result) : result\n end\n end\n\n if has_include?(column_name)\n relation = apply_join_dependency\n\n if operation == \"count\"\n unless distinct_value || distinct_select?(column_name || select_for_count)\n relation.distinct!\n relation.select_values = [ klass.primary_key || table[Arel.star] ]\n end\n # PostgreSQL: ORDER BY expressions must appear in SELECT list when using DISTINCT\n relation.order_values = [] if group_values.empty?\n end\n\n relation.calculate(operation, column_name)\n else\n perform_calculation(operation, column_name)\n end\n end",
"def subtotal\n #SQL version of summing which is much faster than Ruby\n line_items.select(\"SUM(quantity * price) AS sum\")[0].sum\n # Ruby version of sum\n # line_items.to_a.sum {|item| item.total}\nend",
"def sum\n transactions.sum(:amount)\n end",
"def sum\n transactions.sum(:amount)\n end",
"def aggregate\n #response = Result.collection.map_reduce(self.map_fn(), _reduce(), :raw => true, :out => {:inline => true}, :query => {:execution_id => id})\n response = Result.where(execution_id: id).map_reduce(self.map_fn(), self.query.reduce).out(inline: true).raw()\n results = response['results']\n if results\n self.aggregate_result = {}\n results.each do |result|\n result = prettify_generated_result(result) if self.query.generated? && result['value']['rereduced']\n self.aggregate_result[result['_id']] = result['value']\n end\n save!\n end\n end",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def async_count(column_name = nil)\n async.count(column_name)\n end",
"def sum_create\n total = add_create.reduce(:+)\nend",
"def async_average(column_name)\n async.average(column_name)\n end",
"def sum(column)\n result = all\n\n if result.any?\n result.inject(0.0) do |acc, record|\n if value = record.public_send(column)\n acc += value\n end\n\n acc\n end\n end\n end",
"def result\n VoucherRow.joins(:voucher).where(:account_number => number, 'vouchers.activity_year_id' => activity_year_id, :canceled => false).sum(:sum).to_f\n end",
"def sum\n\t\treturn self.reduce(:+)\n\tend",
"def sum\n self.reduce('lambda{|sum, item| sum + item}')\n end",
"def total_tardies_for_all_students\n \"SELECT SUM(tardies) from students\"\nend",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def index\n sql = \"select s.data, sum(s.valor) as valor from saldos s group by s.data order by s.data desc\"\n @saldos = ActiveRecord::Base.connection.execute(sql).to_a\n end",
"def money_made\n # InvoiceItem.joins(invoices: [:transactions, :merchants])\n Merchant.joins(invoices: :transactions).joins(invoices: :invoice_items).where(\"invoices.merchant_id =?\", \"1\").where(transactions: {result: \"success\"}).sum('invoice_items.unit_price * invoice_items.quantity')\n Merchant.joins(invoices: :transactions).joins(invoices: :invoice_items).where(\"invoices.merchant_id =?\", \"1\").where(transactions: {result: \"success\"}).group(\"merchants.id\").select(\"merchants.*, sum(invoice_items.quantity)\")\nend",
"def sum\n self.inject(:+)\n end",
"def order_total\n if self.showtime_id.present?\n my_order = Order.where(showtime_id: self.showtime_id)\n #my_order_total = my_order.group(:showtime_id).sum(:order_quantity) \n my_order_total = my_order.sum(:order_quantity) + self.order_quantity # works\n #my_order_total = my_order.pluck(:order_quantity).inject(:+) + self.order_quantity\n #total = my_order.map {|o| o['order_quantity']}.reduce(0, :+)\n # total = my_order.inject(0) { |sum, self.order_quantity| sum + self.order_quantity }\n puts \"all-orders\" * 20\n puts my_order_total\n puts self\n puts \"showid\" * 50\n puts self.showtime_id # get the showtime_id for new order.\n return my_order_total \n else\n puts \"id is nil\"\n end\n end",
"def execute_simple_calculation(operation, column_name, distinct) #:nodoc:\n # LIMIT 0 clauses on aggregate queries will return a 0 result\n # no need to query salesforce for that\n return 0 if has_limit_or_offset? && limit_value == 0\n\n if operation == \"count\" && (column_name == :all && distinct || has_limit_or_offset?)\n # Shortcut when limit is zero.\n \n query_builder = build_count_subquery(spawn, column_name, distinct)\n else\n # PostgreSQL doesn't like ORDER BY when there are no GROUP BY\n relation = unscope(:order).distinct!(false)\n\n column = aggregate_column(column_name)\n select_value = operation_over_aggregate_column(column, operation, distinct)\n\n relation.select_values = [select_value]\n\n query_builder = relation.arel\n end\n\n result = skip_query_cache_if_necessary { @klass.connection.select_all(query_builder, nil) }\n row = result.first\n\n value = row && row.fetch(\"expr0\")\n\n type = type_for(column_name)\n \n type_cast_calculated_value(value, type, operation)\n end",
"def summ\n result = 0.0\n self.propose_items.each do |item|\n result += item.price\n end\n return result\n end",
"def sum_expense(expenses)\n sum = expenses.sum\n return sum\nend",
"def sequence_sum\n reduce(:+)\n end",
"def async_result()\n #This is a stub, used for indexing\n end",
"def sum(field = nil)\n block_given? ? super() : aggregates(field)[\"sum\"] || 0\n end",
"def running_total\n sum = 0.00\n @items.each { |item| sum += item[\"price\"] }\n return sum\n end",
"def sum\n reduce(0, &:+)\n end",
"def visit_axiom_aggregate_sum(sum)\n aggregate_function_sql(SUM, sum)\n end",
"def sum(project_id, event_collection, target_property, options = {})\n options[:event_collection] = event_collection\n options[:target_property] = target_property\n\n resource \"projects/#{project_id}/queries/sum\", options\n end",
"def query(&block)\n @delegate.query(block)\n end",
"def row_sum()\n return self.work_packets.sum( :worked_hours )\n end",
"def pending_sum\n\t\tpending.sum(\"amount * (artist_percentage / 100)\").to_f.round(2)\n\tend",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def recalc_totals_from_db!\n self.subtotal = self.time_entries.sum(:total_amount) + self.expense_entries.sum(:total_amount)\n self.paid = self.payment_allocates.sum(:amount)\n self.save!\n end",
"def value\n @collection.sum\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def total_points\n filtered_records.limit(nil).approved.sum('completed_tasks.points * COALESCE(completed_tasks.quantity, 1)')\n end",
"def pending_balance\n transactions.where(pending: true).sum(:amount)\n end",
"def revenue\n Invoice.joins(:invoice_items, :transactions)\n .where(merchant: self.id, 'transactions.result' => 'success')\n .sum('invoice_items.quantity * invoice_items.unit_price')\n end",
"def sum; end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def aVeryBigSum(ar)\n sum = ar.sum\nreturn sum\nend",
"def total_value\r\n return 0 if self.value.nil?\r\n self.value + self.expenses.includes(:expense).map{ |e|\r\n (e.quantity || 0) * (e.expense.signed_price || 0)\r\n }.sum\r\n end",
"def amount_sum_for(meth)\n entries.select(&meth).map{ |entry| entry.amount.to_i }.compact.inject(&:+) || 0\n end",
"def print_cals(db)\r\n total = db.execute(\"select sum(calories) as MyColumnSum from foodlist\")\r\n puts \"#{total}\"\r\n # puts \"Total calories is: #{cal_arr}\"\r\nend",
"def get_item\n CONNECTION.execute(\"SELECT *, quantity * item_cost as item_total FROM order_items WHERE id = '#{@id}';\")\n end",
"def budget\n accountings.where(:positive => true).sum :amount\n end",
"def parallel_plus\n api_get(API_MAP[:parallel_plus]).to_f\n end",
"def get_customer_payments_value(fecha1,fecha2,id)\n\n facturas = CustomerPayment.where([\" company_id = ? AND fecha1 >= ? and fecha1 <= ? and bank_acount_id = ?\", self.id, \"#{fecha1} 00:00:00\",\"#{fecha2} 23:59:59\" , id]).order(:id)\n ret = 0 \n if facturas \n ret=0 \n for factura in facturas \n ret += factura.total\n end\n end \n return ret \n end",
"def get_customer_payments_value(fecha1,fecha2,id)\n\n facturas = CustomerPayment.where([\" company_id = ? AND fecha1 >= ? and fecha1 <= ? and bank_acount_id = ?\", self.id, \"#{fecha1} 00:00:00\",\"#{fecha2} 23:59:59\" , id]).order(:id)\n ret = 0 \n if facturas \n ret=0 \n for factura in facturas \n ret += factura.total\n end\n end \n return ret \n end",
"def total\n sum(:total)\n end",
"def total_price\n total = 0\n self.transactions.each do |t|\n total += t.price\n end\n total\n end",
"def run_query(q)\n return sky_table.query(q)\n end",
"def query_result_set(query)\n\t result = ValueSet.new\n\t call(:query_result_set, query) do |marshalled_set|\n\t\tfor task in marshalled_set\n\t\t task = local_object(task)\n\t\t Distributed.keep.ref(task)\n\t\t result << task\n\t\tend\n\t end\n\n\t result\n\tend",
"def show\n\n\t@project = Project.find(params[:id])\n\t@promises = Promise.all\n\t@actual = Fund.where(:project_id => @project.id).sum(:amount)\n end",
"def sum(column_name)\n \traise ActiveRecord::StatementInvalid, \"Missing column name entered!\" if column_name.is_a? Integer\n \tbegin\n \t\tself.catalog_analytics.sum(column_name)\n \trescue ActiveRecord::StatementInvalid\n \t\traise ActiveRecord::StatementInvalid, \"Missing column name entered!\"\n \tend\t\n end",
"def quantity_product(type)\n self.items.includes(:dealing).where(\"dealings.type_deal\", type)\n # quantity_array = Product.find_by_sql(\"SELECT products.name SUM(quantity) FROM items \" +\n # \"JOIN products ON (items.product_id = products.id) \" +\n # \"JOIN dealings ON (items.dealing_id = dealings.id)\" +\n # \"WHERE dealings.type_deal = '#{type}' \" +\n # \"AND products.id = #{self.id};\")\n # puts \"!!!return from quantity_products self.id = #{self.id}\"\n # p quantity_array.first.sum\n # p quantity_array\n # quantity_array.first\n end",
"def index\n @q = Record.ransack(params[:q])\n @records = @q.result(distinct: true)\n @total_sales = @records.sum(:sales)\n end",
"def run(&block)\n @repository.query(self, &block)\n end",
"def result\n ActiveRecord::Base.connection.select_all(sql).entries\n end",
"def compute_sum(numbers)\n sum = 0\n numbers.each do |number|\n sum += number\n end\n return sum\nend",
"def compute_sum(numbers)\n sum = 0\n numbers.each do |number|\n sum += number\n end\n return sum\nend",
"def bi_total(_project, _group)\n if _group.nil?\n budget_items.joins(:budget).where(\"budgets.project_id in (?)\",_project).select('SUM(budget_items.amount) bi_t')\n else\n budget_items.joins(:budget,:charge_account).where(\"budgets.project_id in (?) AND charge_accounts.charge_group_id = ?\",_project, _group).select('SUM(budget_items.amount) bi_t')\n end\n end",
"def aVeryBigSum(ar)\n sum = 0\n ar.collect do |value|\n sum += value\n end\n return sum\nend",
"def return_sum\n @money = 0\n @transactions.each do |t|\n if t.get_action == 'buy'\n @money = @money + t.get_value\n elsif t.get_action == 'sell'\n @money = @money - t.get_value\n end\n end\n total_value_of_shares - @money\n end",
"def existing_clients_new_revenue \n existing_clients.where(new_money: true).sum(:first_year_comp)\nend",
"def total\n base_query.sum('max_candidates')\n end",
"def query(sql)\n if NB.neverblocking? && NB.reactor.running?\n send_query sql\n NB.wait(:read, IO.new(socket))\n get_result\n else\n super(sql)\n end\n end",
"def total_quantity\n total = 0.0\n self.quantities.each do |quantity|\n total = total + quantity.number\n end\n total\n end",
"def calories_burned(db, member_id)\n total_calories= []\n calories_person = db.execute(\"SELECT * FROM calories WHERE member_id='#{member_id}' \")\n calories_person.each do |cals|\n total_calories << cals['amt_burned']\n end \n total_calories.inject(:+)\nend",
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def query(sql, *bind_values)\n @pool.acquire { |conn| conn.query(sql, *bind_values).first }\n end",
"def sum(attr)\n col = column(attr)\n if col.empty?\n 0\n else\n col.reduce(&:+)\n end\n end",
"def order_total\n order_sum = 0\n Order.all.each do |order|\n order_sum += order.order_quantity.to_i\n end \n order_sum\n end",
"def get_customer_payments_value_customer(fecha1,fecha2,id,cliente,value)\nfacturas = CustomerPayment.find_by_sql(['Select DISTINCT ON (1) customer_payments.id,customer_payments.total,\nfacturas.code,facturas.customer_id,facturas.fecha,customer_payment_details.factory from customer_payment_details \nINNER JOIN facturas ON customer_payment_details.factura_id = facturas.id\nINNER JOIN customer_payments ON customer_payments.id = customer_payment_details.customer_payment_id \nWHERE customer_payments.fecha1 >= ? and customer_payments.fecha1 <= ? and \ncustomer_payments.bank_acount_id = ? and facturas.customer_id = ?',\n \"#{fecha1} 00:00:00\",\"#{fecha2} 23:59:59\",id,cliente ])\n ret = 0 \n\n if facturas \n ret=0 \n for d in facturas \n if (value == \"total\")\n ret += d.total \n \n end\n end \n end \n return ret \n end",
"def get_customer_payments_value_customer(fecha1,fecha2,id,cliente,value)\nfacturas = CustomerPayment.find_by_sql(['Select DISTINCT ON (1) customer_payments.id,customer_payments.total,\nfacturas.code,facturas.customer_id,facturas.fecha,customer_payment_details.factory from customer_payment_details \nINNER JOIN facturas ON customer_payment_details.factura_id = facturas.id\nINNER JOIN customer_payments ON customer_payments.id = customer_payment_details.customer_payment_id \nWHERE customer_payments.fecha1 >= ? and customer_payments.fecha1 <= ? and \ncustomer_payments.bank_acount_id = ? and facturas.customer_id = ?',\n \"#{fecha1} 00:00:00\",\"#{fecha2} 23:59:59\",id,cliente ])\n ret = 0 \n\n if facturas \n ret=0 \n for d in facturas \n if (value == \"total\")\n ret += d.total \n \n end\n end \n end \n return ret \n end",
"def total(rows)\n rows.map { |row| row[\"Money we made\"].to_i }.reduce(:+)\nend",
"def find_sum\n self.inject(0) { |sum, x| sum + x.to_f }\n end",
"def sum_column(column_name)\n @result.inject(0.0) { |sum, item_hash|\n if item_hash.has_key?(column_name)\n sum + item_hash[column_name].to_f\n else\n sum\n end\n }\n end",
"def fetch_rows(sql, opts=OPTS, &block)\n db.execute(sql){|result| process_result_set(result, opts, &block)}\n self\n end",
"def run_promise(&block)\n Concurrent::Promise.new do\n @connection ||= Faraday.new(:url => base_uri, :ssl => MsRest.ssl_options) do |faraday|\n middlewares.each{ |args| faraday.use(*args) } unless middlewares.nil?\n faraday.adapter Faraday.default_adapter\n logging = ENV['AZURE_HTTP_LOGGING'] || log\n if logging\n faraday.response :logger, nil, { :bodies => logging == 'full' }\n end\n end\n\n loop do\n @response = @connection.run_request(:\"#{method}\", build_path, body, {'User-Agent' => user_agent}.merge(headers)) do |req|\n req.params = req.params.merge(query_params.reject{|_, v| v.nil?}) unless query_params.nil?\n yield(req) if block_given?\n end\n\n break if ((@response.status != 429) || (@response.status == 429 && @response.headers['retry-after'].nil?))\n\n if(@response.status == 429 && !@response.headers['retry-after'].nil?)\n sleep(@response.headers['retry-after'].to_i)\n end\n end\n @response\n end\n end",
"def category_totals(db, user_name, number)\r\n\tretrieve_totals = '\r\n\tSELECT categories.name, SUM(amount) FROM expenses\r\n\tJOIN users ON expenses.user_id = users.id\r\n\tJOIN categories ON expenses.category_id = categories.id\r\n\tWHERE categories.id = ?\r\n\tAND users.name = ?'\r\n\ttotals = db.execute(retrieve_totals, [number, user_name])[0]\r\nend",
"def get_value_of_shelf\n value = DATABASE.execute(\"SELECT price, quantity FROM products WHERE shelf_id = #{@id}\")\n \n value.each do |x|\n shelf_value += x[:quantity] * x[:price]\n end\n shelf_value\n end",
"def calculate_total(database, user_id)\n\ttotal = 0\n\titem_costs = database.execute(\n\t\t\"SELECT *\n\t\tFROM purchases\n\t\tWHERE login_id='#{user_id}';\"\n\t\t)\n\t\titem_costs.each do |cost|\n\t\t\ttotal += cost[3]\n\t\tend\n\t\ttotal\nend",
"def total\n votes.sum(:value)\n end",
"def sum_multiples(limit)\n multiples = collect_multiples(limit)\n # return multiples.sum\n return multiples.inject(0){|sum,n| sum + n }\nend",
"def total_sum\n rows.collect(&:sum_with_vat).sum\n end",
"def breastfed_quantity_today \n breastfeedings.where(\"created_at >= :start_at AND created_at <= :end_date\", {start_at: Time.now.beginning_of_day, end_date: Time.now}).sum(:quantity) \n end",
"def sum(field)\n sum = documents.inject(nil) do |memo, doc|\n value = doc.send(field)\n memo ? memo += value : value\n end\n end",
"def async_lookup(args)\n unless (Hash === args)\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all do |rows|\n yield(rows.any? ? rows.first : nil)\n end\n\n return\n end",
"def sum arr\n sum = arr.sum\n return sum\nend",
"def execute\n # build the query string\n # run the query\n # return the results\n end",
"def total_on(date)\n total = (items.sum :value, :conditions=>{:date=>date}) || 0\n total += value_unpaid_tasks_on(date)\n end",
"def get_sum(a, b)\n [a,b].reduce(&:+)\nend",
"def sum_array ary\n sleep(2)\n sum = 0\n ary.each do |val|\n sum += val\n end\n sum\nend",
"def compute_total_value\n @value= @net_price * @count\n end",
"def customer_funds\n sql = \"SELECT funds FROM customers WHERE customers.id = $1\"\n # \"SELECT funds FROM tickets INNER JOIN customers ON tickets.customer_id = customers.id WHERE customer_id = $1\"\n values = [@customer_id]\n return SqlRunner.run(sql, values)[0]['funds'].to_i\nend"
] | [
"0.591459",
"0.58025765",
"0.5746341",
"0.56798476",
"0.56798476",
"0.56158334",
"0.55061966",
"0.545525",
"0.54466367",
"0.5435918",
"0.54290265",
"0.53577954",
"0.53356",
"0.53315973",
"0.5273493",
"0.5221661",
"0.5211997",
"0.5177458",
"0.5129632",
"0.51146936",
"0.5112505",
"0.5104533",
"0.51039904",
"0.5096499",
"0.5071344",
"0.50645125",
"0.50255644",
"0.50087756",
"0.50084335",
"0.4960578",
"0.49559212",
"0.49461403",
"0.49444503",
"0.49390057",
"0.49261498",
"0.49176618",
"0.49020314",
"0.49020314",
"0.49020314",
"0.48962858",
"0.48902172",
"0.48860553",
"0.48858112",
"0.48788318",
"0.48788053",
"0.48705268",
"0.48663485",
"0.48632324",
"0.48601338",
"0.48515528",
"0.4843316",
"0.48410404",
"0.48410404",
"0.48230904",
"0.48192745",
"0.4819031",
"0.48147553",
"0.48054677",
"0.4805033",
"0.48036546",
"0.4802088",
"0.47994626",
"0.47974402",
"0.47965285",
"0.47965285",
"0.47919938",
"0.47804266",
"0.47795975",
"0.4776728",
"0.47763264",
"0.4775419",
"0.47723854",
"0.4771304",
"0.47661373",
"0.4765026",
"0.4758973",
"0.47582397",
"0.47576967",
"0.47576967",
"0.47530413",
"0.47504392",
"0.47457102",
"0.47441933",
"0.47437406",
"0.47433394",
"0.47419629",
"0.47343352",
"0.47313163",
"0.47312117",
"0.47309527",
"0.47261822",
"0.4723822",
"0.47225758",
"0.4722164",
"0.4717702",
"0.47141415",
"0.47136712",
"0.47114465",
"0.47076234",
"0.47060198"
] | 0.74440587 | 0 |
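Illustrative aside (not part of the dataset rows): the async_sum entry above simply wraps sum behind a promise. The minimal sketch below shows that calling pattern under stated assumptions — SimplePromise and the async_sum helper here are hypothetical stand-ins, not the real ActiveRecord::Promise API.

# Hypothetical promise wrapper; only illustrates the call-now, read-later shape.
class SimplePromise
  def initialize(&work)
    @thread = Thread.new(&work)
  end

  # Blocks until the background computation finishes, then returns its result.
  def value
    @thread.value
  end
end

def async_sum(numbers)
  SimplePromise.new { numbers.sum }
end

promise = async_sum([1, 2, 3, 4])
# ...other work can proceed while the sum runs in the background...
p promise.value # => 10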
This calculates aggregate values in the given column. Methods for count, sum, average, minimum, and maximum have been added as shortcuts. Person.calculate(:count, :all) The same as Person.count Person.average(:age) SELECT AVG(age) FROM people... Selects the minimum age for any family without any minors Person.group(:last_name).having("min(age) > 17").minimum(:age) Person.sum("2 * age") There are two basic forms of output: Single aggregate value: The single value is type cast to Integer for COUNT, Float for AVG, and the given column's type for everything else. Grouped values: This returns an ordered hash of the values and groups them. It takes either a column name, or the name of a belongs_to association. values = Person.group('last_name').maximum(:age) puts values["Drake"] => 43 drake = Family.find_by(last_name: 'Drake') values = Person.group(:family).maximum(:age) Person belongs_to :family puts values[drake] => 43 values.each do |family, max_age| ... end | def calculate(operation, column_name)
operation = operation.to_s.downcase
if @none
case operation
when "count", "sum"
result = group_values.any? ? Hash.new : 0
return @async ? Promise::Complete.new(result) : result
when "average", "minimum", "maximum"
result = group_values.any? ? Hash.new : nil
return @async ? Promise::Complete.new(result) : result
end
end
if has_include?(column_name)
relation = apply_join_dependency
if operation == "count"
unless distinct_value || distinct_select?(column_name || select_for_count)
relation.distinct!
relation.select_values = [ klass.primary_key || table[Arel.star] ]
end
# PostgreSQL: ORDER BY expressions must appear in SELECT list when using DISTINCT
relation.order_values = [] if group_values.empty?
end
relation.calculate(operation, column_name)
else
perform_calculation(operation, column_name)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sum(column)\n result = all\n\n if result.any?\n result.inject(0.0) do |acc, record|\n if value = record.public_send(column)\n acc += value\n end\n\n acc\n end\n end\n end",
"def sum_column(column_name)\n @result.inject(0.0) { |sum, item_hash|\n if item_hash.has_key?(column_name)\n sum + item_hash[column_name].to_f\n else\n sum\n end\n }\n end",
"def calculate_score\n @results.collect { |mir| mir.send(@column.to_sym) }.sum\n end",
"def average(column_name)\n calculate(:average, column_name)\n end",
"def average(column)\n if s = sum(column)\n s / _all_with_present_column(column).count.to_f\n end\n end",
"def aggregate_values(rows)\n # Convert rows into hash where each key is a column name and the each\n # value is an array of values for that column\n cols = OrderedHash.new\n rows.each do |row|\n row.each do |k,v|\n cols[k] ||= []\n cols[k] << v\n end\n end\n\n # Loop through each column, applying an aggregate proc if one exists\n # to the array of column values. If a proc does not exist we take the\n # last value from the array.\n result = cols.inject(OrderedHash.new) do |hsh, (col, vals)|\n hsh[col] = if @aggregators[col]\n @aggregators[col].call(vals)\n else\n vals.last\n end\n hsh\n end\n\n Row[result]\n end",
"def aggregate(term = \"COUNT(*)\", options = {})\n # Leave this .dup here, causes problems because options are changed\n options = options.dup\n\n klass = options[:class]\n\n # Rename search term, SQL92 but _not_ SQL89 compatible\n options = {\n :select => \"#{term} AS #{term[/^\\w+/]}\"\n }.update(options)\n\n unless options[:group] || options[:group_by]\n options.delete(:order)\n options.delete(:order_by)\n end\n sql = resolve_options(klass, options)\n\n if field = options[:field]\n return_type = klass.ann(field, :class) || Integer\n else\n return_type = Integer\n end\n\n if options[:group] || options[:group_by]\n # This is an aggregation, so return the calculated values\n # as an array.\n values = []\n res = query(sql)\n res.each_row do |row, idx|\n values << type_cast(return_type, row[0])\n end\n return values\n else\n return type_cast(return_type, query(sql).first_value)\n end\n end",
"def sum(column_name)\n \traise ActiveRecord::StatementInvalid, \"Missing column name entered!\" if column_name.is_a? Integer\n \tbegin\n \t\tself.catalog_analytics.sum(column_name)\n \trescue ActiveRecord::StatementInvalid\n \t\traise ActiveRecord::StatementInvalid, \"Missing column name entered!\"\n \tend\t\n end",
"def sum(field)\n grouped(:sum, field.to_s, Javascript.sum)\n end",
"def execute_simple_calculation(operation, column_name, distinct) #:nodoc:\n # LIMIT 0 clauses on aggregate queries will return a 0 result\n # no need to query salesforce for that\n return 0 if has_limit_or_offset? && limit_value == 0\n\n if operation == \"count\" && (column_name == :all && distinct || has_limit_or_offset?)\n # Shortcut when limit is zero.\n \n query_builder = build_count_subquery(spawn, column_name, distinct)\n else\n # PostgreSQL doesn't like ORDER BY when there are no GROUP BY\n relation = unscope(:order).distinct!(false)\n\n column = aggregate_column(column_name)\n select_value = operation_over_aggregate_column(column, operation, distinct)\n\n relation.select_values = [select_value]\n\n query_builder = relation.arel\n end\n\n result = skip_query_cache_if_necessary { @klass.connection.select_all(query_builder, nil) }\n row = result.first\n\n value = row && row.fetch(\"expr0\")\n\n type = type_for(column_name)\n \n type_cast_calculated_value(value, type, operation)\n end",
"def summarize(data, summarization_method, variable=nil)\n case summarization_method\n when 'sum'\n data = data.sum(variable)\n when 'max'\n data = data.maximum(variable)\n when 'count'\n data = data.count(variable)\n end\n end",
"def aggregate\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :count => 0 },\n :reduce => Javascript.aggregate\n )\n end",
"def aggregate(property, resolution)\n # Look up the date/time dimensions for the resolution.\n date_time_dimensions = date_time_dimensions_for_resolution(resolution)\n\n # Build the timestamp from the date/time dimensions.\n timestamp = Sequel::SQL::NumericExpression.new(:+, *date_time_dimensions).cast(:timestamp).as(:timestamp)\n\n # Build a window function to sum the counts.\n count_window_function = Sequel::SQL::Function.new(:sum, :count).over(partition: date_time_dimensions).as(:count)\n\n # Build the aggregation window functions.\n aggregation_window_functions = AGGREGATIONS.map do |aggregation|\n Sequel::SQL::Function.new(aggregation, :\"#{property}\").over(partition: date_time_dimensions).as(:\"#{aggregation}_#{property}\")\n end\n\n facts_dataset\n .join(:dimension_dates, date: Sequel.cast(:timestamp, :date))\n .join(:dimension_times, time: Sequel.cast(:timestamp, :time))\n .distinct(*date_time_dimensions)\n .select(timestamp, count_window_function, *aggregation_window_functions)\n end",
"def calculate_aggregate(cells)\n value = Array.new(cells.first.value.length, 0)\n cells.each do |c|\n c.value.each_with_index do |v, index|\n value[index] += v\n end\n end\n value\n end",
"def aggregates\n Rails.cache.fetch(\"aggregates_#{interval}_#{cache_time}\", expires_in: self.cache_time) {\n ActiveRecord::Base.connection.exec_query(\"\n select\n stddev(sum_downvotes) as stddev,\n sum(sum_downvotes) as sum,\n avg(sum_downvotes) as avg,\n avg(n_comments) as n_comments,\n count(*) as n_commenters\n from (\n select\n sum(downvotes) as sum_downvotes,\n count(*) as n_comments\n from comments join users on comments.user_id = users.id\n where\n (comments.created_at >= '#{period}') and\n users.banned_at is null and\n users.deleted_at is null\n GROUP BY comments.user_id\n ) sums;\n \").first.symbolize_keys!\n }\n end",
"def maximum(column_name)\n calculate(:maximum, column_name)\n end",
"def column_average(input_query, key)\n output = input_query.average(key)\n number_with_precision(output, precision: 1)\n end",
"def sum(attr)\n col = column(attr)\n if col.empty?\n 0\n else\n col.reduce(&:+)\n end\n end",
"def visit_axiom_aggregate_sum(sum)\n aggregate_function_sql(SUM, sum)\n end",
"def aggregate\n #response = Result.collection.map_reduce(self.map_fn(), _reduce(), :raw => true, :out => {:inline => true}, :query => {:execution_id => id})\n response = Result.where(execution_id: id).map_reduce(self.map_fn(), self.query.reduce).out(inline: true).raw()\n results = response['results']\n if results\n self.aggregate_result = {}\n results.each do |result|\n result = prettify_generated_result(result) if self.query.generated? && result['value']['rereduced']\n self.aggregate_result[result['_id']] = result['value']\n end\n save!\n end\n end",
"def column_sum(input_query, key)\n input_query.sum(key)\n end",
"def aggregate(value)\n @query_hash[AGGREGATE][value] = value\n self\n end",
"def statement\n case @metric.aggregate\n when :sum\n parts = {\n select: select_statement,\n joins: dimension_joins,\n having: having_statement,\n order: order_by_statement\n }\n\n statement = ([model] + parts.keys).inject do |chain, method|\n chain.public_send(method, parts[method])\n end\n\n statement = process_scope_dimension_filter(statement)\n statement = process_lambda_dimension_filter(statement)\n statement = process_ransack_dimension_filter(statement)\n\n # The original gem did not handle has_many relationships. In order to support\n # has_many, we need to first do an inner query to select out distinct rows _before_\n # attempting the sum. Therefore we build up the query piece\n # by piece rather than using the basic statement.\n\n sum_definition = parts[:select].first\n original_columns = parts[:select].drop(1)\n\n # Collect a list of all renamed columns from the original query so that we can include\n # these in the outer query.\n renamed_columns = []\n original_columns.each do |sel|\n renamed_columns << sel.split(' AS ').last\n end\n\n # In some situations the column we're summing over is not included as a part of the aggregation\n # in the inner query. In such cases we must explicitly select the desired column in the inner\n # query, so that we can sum over it in the outer query.\n summation_metric = if select_aggregate.include?(\"CASE\")\n select_aggregate.split('CASE WHEN ').last.split(' ').first\n else\n ''\n end\n\n outer_columns = ([sum_definition] << renamed_columns).flatten.uniq.join(', ')\n inner_columns = (original_columns << [summation_metric, fact_model.measure.to_s]).flatten.uniq.reject(&:blank?).join(', ').remove(\"\\n\").squeeze(' ')\n inner_from = statement.to_sql.split('FROM').last\n group_by = outer_group_by_statement.join(', ')\n\n # Finally, construct the query we want and return it as a string\n full_statement = \"SELECT #{outer_columns} FROM(SELECT #{distinct}, #{inner_columns} FROM #{inner_from}) AS T\"\n\n # Add the GROUP BY clause only if it's non nil and non empty\n full_statement = \"#{full_statement} GROUP BY #{group_by}\" if group_by.present?\n\n full_statement\n\n else\n parts = {\n select: select_statement,\n joins: dimension_joins,\n group: group_by_statement,\n having: having_statement,\n order: order_by_statement\n }\n\n statement = ([model] + parts.keys).inject do |chain, method|\n chain.public_send(method, parts[method])\n end\n\n statement = process_scope_dimension_filter(statement)\n statement = process_lambda_dimension_filter(statement)\n statement = process_ransack_dimension_filter(statement)\n\n statement.to_sql\n end\n end",
"def aggregates\n @aggregates\n end",
"def sum_by(attribute)\n @legs.pluck(attribute).select{|i| i.is_a?(Numeric)}.reduce(&:+)\n end",
"def run_aggregation\n GRADES.each_with_index do |grade, idx|\n classifier[grade].each_pair do |metric, values|\n all_values = values\n all_values += classifier[GRADES[idx + 1]][metric] if (idx + 1) < GRADES.count\n\n classifier[grade][metric] =\n if all_values.count <= 2\n values.max || 0\n else\n (all_values.sum / all_values.count).round(2)\n end\n end\n end\n end",
"def calculate_function(function)\n raise \"invalid function '#{function}'\" unless [:sum, :avg, :min, :max, :count].include?(function.to_sym)\n Sequel::SQL::Function.new(function.to_sym, :value)\n end",
"def values_for(attribute,options = {})\n find(:all,{:select => \"#{table_name}.#{attribute}, COUNT(#{table_name}.#{attribute}) as cnt\", :group => \"#{table_name}.#{attribute}\",:order => 'cnt DESC', :having => options[:min_count] ? \"cnt >= #{options[:min_count]}\" : nil}.merge(options.only(:conditions,:joins))).map{ |x| [x.read_attribute(attribute), x.cnt.to_i] }\n end",
"def aggregated_fact_column_sql\n aggregate_fields.collect { |c| \n \"#{c.strategy_name}(#{c.from_table_name}.#{c.name}) AS #{c.label_for_table}\"\n }.join(\",\")\n end",
"def point_estimate(column)\n col_type = schema.type column\n Veritable::Util.check_datatype(col_type, \"Point estimate -- \")\n if col_type == 'boolean' or col_type == 'categorical'\n # use the mode\n (counts(column).max_by {|k, v| v})[0]\n elsif col_type == 'real' or col_type == 'count'\n # use the mean\n values = distribution.collect {|row| row[column]}\n mean = (values.inject(0) {|memo, obj| memo + obj}) / values.size.to_f\n col_type == 'real' ? mean : mean.round.to_i\n end\n end",
"def aggregates\n self.class.instance_variable_get(:@aggregates) || {}\n end",
"def calculate\n self.total\n end",
"def total_by_group_for(column, only_visible=true)\n ################\n # Smile specific : cache\n return nil unless column\n\n #-----------------------------\n # Smile specific : debug trace\n debug = nil\n if self.respond_to?('debug')\n debug = self.debug\n end\n\n if debug\n start = Time.now\n end\n\n @total_by_group_for_by_column ||= {true => {}, false => {}}\n\n if @total_by_group_for_by_column[only_visible][column.name]\n logger.debug \"==>prof #{@indent_spaces}from CACHE total_for(#{column.name}, ...#{', NOT only_visible' unless only_visible})\" if debug\n\n return @total_by_group_for_by_column[only_visible][column.name]\n end\n # END -- Smile specific : cache\n #######################\n\n ################\n # Smile specific : manage composite Issue Columns\n # TODO add hook for total_for_bar\n if self.respond_to?('total_for_bar')\n logger.debug \"\\\\=>prof total_for_bar\" if debug == '3'\n total_for_bar(column, only_visible, @total_by_group_for_by_column, :total_by_group_for)\n logger.debug \"/=>prof total_for_bar\" if debug == '3'\n end\n\n if @total_by_group_for_by_column[only_visible][column.name]\n logger.debug \" =>prof column #{column.name}/only_visible=#{only_visible} in CACHE\" if debug == '2'\n return @total_by_group_for_by_column[only_visible][column.name]\n end\n # END -- Smile specific : manage composite Issue Columns\n #######################\n\n if debug\n logger.debug \" =>prof\"\n logger.debug \"\\\\=>prof #{@indent_spaces}total_by_group_for(#{column.name}#{', NOT only_visible' unless only_visible}) NATIVE\"\n end\n\n ################\n # Smile specific : cache\n # Smile comment : UPSTREAM CODE\n @total_by_group_for_by_column[only_visible][column.name] = grouped_query do |scope|\n ################\n # Smile specific : + param only_visible\n total_with_scope(column, scope, only_visible)\n end\n\n #-----------------------------\n # Smile specific : debug trace\n if debug\n logger.debug \"/=>prof #{@indent_spaces}total_by_group_for(#{column.name}#{', NOT only_visible' unless only_visible}) -- #{format_duration(Time.now - start, true)}\"\n end\n\n ################\n # Smile specific : cache\n @total_by_group_for_by_column[only_visible][column.name]\n end",
"def sum(attribute_name, options = {})\n calculate(:sum, attribute_name, options)\n end",
"def aggregate\n counts = {}\n group.each_pair { |key, value| counts[key] = value.size }\n counts\n end",
"def calculate_column_names(method)\n column_names = AllTypesObject.column_names\n case method\n when 'calculate', 'count'\n column_names = [nil] + column_names\n when 'average', 'sum'\n column_names = column_names.reject { |a| ['binary_col', 'boolean_col', 'date_col', 'datetime_col', 'string_col', 'text_col', 'timestamp_col', 'created_at', 'updated_at'].include?(a) }\n when 'maximum', 'minimum'\n column_names = column_names.reject { |a| ['binary_col', 'boolean_col'].include?(a) }\n end\n column_names\n end",
"def aggregate name, o, collector\n collector << \"#{name}(\"\n if o.distinct\n collector << \"DISTINCT \"\n end\n collector = inject_join(o.expressions, Arel::Collectors::Sunstone.new, \", \")# << \")\"\n if o.alias\n collector << \" AS \"\n visit o.alias, collector\n else\n collector\n end\n end",
"def total\n base_query.sum('max_candidates')\n end",
"def metric_value\n\t\tresult = ActiveRecord::Base.connection.execute self.query\n\t\treturn nil if result.blank? || result.first.blank? || result.first.keys.blank?\n\t\treturn result.first[result.first.keys.first] # is there a better way to do this?\n\tend",
"def avg(field)\n aggregates(field)[\"avg\"]\n end",
"def aggregate op, type = :fixnum\n check_closed\n\n aggregation_impl op, type\n end",
"def calc_query(query, table, aggregate = {}, as=nil)\n\tquery = \"select #{query}\"\n\n\tif !aggregate.empty?\n\t\taggregate.each do |operation, column|\n\t\t\tquery += \"#{operation}(#{column}) \"\n\t\t\tquery += as \" #{as}\"\n\t\tend\n\tend\n\n\tquery += \" from #{table}\"\n\n\treturn query\nend",
"def max(field)\n grouped(:max, field.to_s, Javascript.max)\n end",
"def total\n Float(@values.values.reduce(:+))\n end",
"def count_by column_name, field\n inc = increment_mask_for column_name, field\n only = only_mask_for column_name, field\n # Create super-special-bitfield-grouping-query w/ AREL\n sql = arel_table.\n project(\"count(#{primary_key}) as #{field}_count, (#{column_name} & #{only})/#{inc} as #{field}\").\n group(field).to_sql\n connection.send :select, sql, 'AREL' # Execute the query\n end",
"def odb_sum\n \"sum(#{to_s})\"\n end",
"def score_aggregate_node( node, normalized, z_score = true )\n aggregation = Hash.new\n case node['operation']\n when 'SUM'\n aggregation = sum_aggregation( node, normalized )\n when 'DIVIDE'\n aggregation = divide_aggregation( node, normalized )\n when 'DIFFERENCE'\n aggregation = subtract_aggregation( node, normalized )\n when 'MULTIPLY'\n aggregation = multiply_aggregation( node, normalized )\n end\n if z_score\n z_scores = z_score(aggregation)\n normalized.keys.each do |key|\n normalized[key][node['id']] = node['weight'] * z_scores[key]\n end\n normalized\n else\n aggregation\n end\n end",
"def sum col_name,field2sum,*fields, &block\n key = key_fields [field2sum] + fields\n @stats[key] = [] unless @stats[key]\n @stats[key] << SumField.new(col_name,block)\n end",
"def sum(initial_value_or_column = 0, &block)\n if block_given?\n map(&block).sum(initial_value_or_column)\n else\n calculate(:sum, initial_value_or_column)\n end\n end",
"def value()\n total = 0\n\n self.counts.values.each do |a|\n total += a[\"total\"]\n end\n\n total\n end",
"def summarize(group_by, properties=[], query=nil, options={})\n rv = []\n\n fields = ([group_by]+[*properties]).compact.collect{|field|\n self.resolve_field(field)\n }.reverse\n\n # pop current field off the stack\n current_field = fields.pop()\n\n # perform query, only return facets (no documents)\n results = self.search({\n :facets => {\n :counts => {\n :facet_filter => (query.nil? ? nil : self.to_elasticsearch_query(query)),\n :terms => {\n :field => current_field,\n :size => (options[:limit].nil? ? DEFAULT_MAX_FACETS : options[:limit])\n },\n :global => true\n }.compact\n }\n }, {\n :limit => 0,\n :raw => true\n })\n\n # if we got anything...\n unless results.get('facets.counts.terms').nil?\n results.get('facets.counts.terms', []).each do |facet|\n rv << {\n :id => facet['term'],\n :field => self.unresolve_field(current_field),\n :count => facet['count'],\n :children => (fields.empty? ? nil :\n # we need to go deeper...\n self.summarize(fields[0], fields[1..-1], [query, \"#{self.unresolve_field(current_field)}/#{facet['term']}\"].compact.join('/'))\n )\n }.compact\n end\n\n # add in empty results as nulls\n if results.get('facets.counts.missing', 0) > 0\n rv << {\n :id => nil,\n :field => self.unresolve_field(current_field),\n :count => results.get('facets.counts.missing'),\n :children => (fields.empty? ? nil :\n # we need to go deeper...\n self.summarize(fields[0], fields[1..-1], [query, \"#{self.unresolve_field(current_field)}/null\"].compact.join('/'))\n )\n }\n end\n end\n\n return rv\n end",
"def sum(field = nil)\n block_given? ? super() : aggregates(field)[\"sum\"] || 0\n end",
"def record_sum(session, record_type, database)\n session.send(database).select_one( \\\n \"select sum(number1) + sum(number2) + sum(number3) + sum(number4) as sum\n from big_scan where diff_type = '#{record_type}'\")['sum'].to_f\n end",
"def build_aggregate_sql(column_mask)\n dimension_column_names = dimensions_to_columns.collect do |c|\n \"#{c.table_alias}.#{c.name}\"\n end\n\n sql = <<-SQL\n SELECT\n #{mask_columns_with_null(dimension_column_names, column_mask).join(\",\")},\n #{aggregated_fact_column_sql}\n FROM #{tables_and_joins}\n SQL\n\n group = mask_columns_with_null(dimension_column_names, column_mask).reject{|o| o == 'null'}.join(\",\")\n sql += \"GROUP BY #{group}\" if !group.empty?\n sql\n end",
"def aggregate_sum(aggr)\n sum = {}\n aggr.each do |ts, counterVals|\n sum[ts] = {} unless sum.has_key?ts\n counterVals.each do |obj, count|\n if obj.respond_to?(:enterprise_id)\n eid = obj.public_send(:enterprise_id).to_s\n sum[ts][eid] = sum[ts].fetch(eid, 0) + count\n end\n end\n end\n sum\n end",
"def get_form_summarize_methods\n [\n [\"count\"],\n [\"sum\"],\n [\"maximum\"],\n [\"minimum\"]\n ]\n end",
"def aggregate_fields\n cube_class.aggregate_fields\n end",
"def total\n sum(:total)\n end",
"def extract_values(records, field)\n summarize_field(records, field).keys\nend",
"def calculate_observation(observation_hash, measure, population_set_key)\n key = population_set_key\n return unless observation_hash[key]\n\n # calculate the aggregate observation based on the aggregation type\n # aggregation type is looked up using the statement_name\n observation_hash[key].each do |population, observation_map|\n next unless observation_map[:statement_name]\n\n pop_set = measure.population_set_for_key(key).first\n popset_index = measure.population_sets_and_stratifications_for_measure.find_index do |population_set|\n pop_set[:population_set_id] == population_set[:population_set_id]\n end\n # find observation that matches the statement_name\n\n observation = pop_set.observations.select { |obs| obs.observation_parameter.statement_name == observation_map[:statement_name] }[popset_index]\n # Guidance for calculations can be found here\n # https://www.hl7.org/documentcenter/public/standards/vocabulary/vocabulary_tables/infrastructure/vocabulary/ObservationMethod.html#_ObservationMethodAggregate\n case observation.aggregation_type\n when 'COUNT'\n @measure_result_hash[measure.hqmf_id][key]['observations'][population] = { value: count(observation_map[:values].map(&:value)),\n method: 'COUNT', hqmf_id: observation.hqmf_id }\n when 'MEDIAN'\n median_value = median(observation_map[:values].map(&:value).compact)\n @measure_result_hash[measure.hqmf_id][key]['observations'][population] = { method: 'MEDIAN', hqmf_id: observation.hqmf_id,\n value: median_value }\n when 'SUM'\n @measure_result_hash[measure.hqmf_id][key]['observations'][population] = { value: sum(observation_map[:values].map(&:value)),\n method: 'SUM', hqmf_id: observation.hqmf_id }\n when 'AVERAGE'\n @measure_result_hash[measure.hqmf_id][key]['observations'][population] = { value: mean(observation_map[:values].map(&:value)),\n method: 'AVERAGE', hqmf_id: observation.hqmf_id }\n end\n end\n end",
"def groupby_standardcost\n { 'std_cost' => { '$sum' => '$standard_cost' } } \n end",
"def average_population\n\n sum = 0\n count = 0\n\n @query.get_graph.each_key do |city|\n population = get_city_info(city,\"population\")\n sum+=population\n count+=1\n end\n\n return sum/count\n\n end",
"def analyze_columns\n # digest the values for each Column\n columns.each {|c| c.crunch}\n\n assign_column_roles\n\n if columns.find_all {|col| col.measure? || col.role == :measure}.empty?\n # no measure columns, create a placeholder\n @measure = Measure.new(:name => \"Unknown\")\n end\n end",
"def amount_sum_for(meth)\n entries.select(&meth).map{ |entry| entry.amount.to_i }.compact.inject(&:+) || 0\n end",
"def total(rows)\n rows.map { |row| row[\"Money we made\"].to_i }.reduce(:+)\nend",
"def compute_denormalized_values(force_all = false)\n self.class.denormalized_attribute_names.each do |attribute_name|\n if force_all || denormalized_value_unset?(attribute_name) || denormalized_value_stale?(attribute_name)\n if !self.respond_to?(self.class.denormalized_compute_method_name(attribute_name))\n raise \"Could not find method #{self.class.denormalized_compute_method_name(attribute_name)} in class #{self.class.name}\"\n else\n compute_denormalized_value(attribute_name)\n end\n end\n end\n end",
"def summarize_per_table_dum\n @having = NO_ROWS\n end",
"def max(attr)\n column(attr).max\n end",
"def counts(column)\n cts = Hash.new\n distribution.each {|row|\n if row.has_key? column\n cat = row[column]\n if not (cts.has_key? cat)\n cts[cat] = 0\n end\n cts[cat] += 1\n end\n }\n cts\n end",
"def print_cals(db)\r\n total = db.execute(\"select sum(calories) as MyColumnSum from foodlist\")\r\n puts \"#{total}\"\r\n # puts \"Total calories is: #{cal_arr}\"\r\nend",
"def get_db_aggregation\n db_data_all = []\n aggregation = @thermostat.readings.pluck('Avg(temperature)', 'Min(temperature)', 'Max(temperature)', 'Avg(humidity)', 'Min(humidity)', 'Max(humidity)', 'Avg(battery_charge)', 'Min(battery_charge)', 'Max(battery_charge)').first\n unless aggregation.empty?\n db_data_all << {\"temperature\" => {\"avg\" => aggregation[0].round(2), \"min\" => aggregation[1], \"max\" => aggregation[2]}}\n db_data_all << {\"humidity\" => {\"avg\" => aggregation[3].round(2), \"min\" => aggregation[4], \"max\" => aggregation[5]}}\n db_data_all << {\"battery_charge\" => {\"avg\" => aggregation[6].round(2), \"min\" => aggregation[7], \"max\" => aggregation[8]}}\n end\n return db_data_all\n end",
"def count(column = nil)\n return super unless column && first && first.class.respond_to?(:descends_from_active_record?)\n column = column.to_sym unless column.is_a?(Symbol) \n inject( 0 ) { |sum, x| x[column].nil? ? sum : sum + 1 }\n end",
"def get_tally_by_flight_column_name(column_name)\n output = 0\n all_flights.each do |flight_obj|\n output += flight_obj.send(column_name)\n end\n return output\n end",
"def statistics\n find(:all, :select => \"title, count(*) AS count, sum(amount) AS sum, avg(amount) AS avg\", :group => :title).map{|stat| stat.attributes}\n end",
"def group_by(column)\n @conjunction.add_group_by(column)\n nil\n end",
"def max_value(table_name, column_name)\n result = @client.query(\"SELECT MAX(`#{column_name}`) FROM `#{table_name}`;\")\n\n result.first.values[0] || 0\n end",
"def sum\n\t\tprintf(\"%02i.%i\\n\", @options[:monat], @options[:jahr])\n\t\t@db.execute(\"select summe, gemeinsam from sum_#{@options[:name]} where jahr = #{@options[:jahr]} and monat = #{@options[:monat]} \") do |row|\n\t\t\tprintf(\"(%s) % .2f EUR \\n\", row[1], row[0])\n\t\tend\n\tend",
"def frequencies(column, normalizer)\n frequency_table = {}\n column_name_sym = column[:column_name].to_sym\n if normalizer\n sample.each do |row|\n elem = normalizer.call(row[column_name_sym])\n update_frequency_element(frequency_table, elem)\n end\n else\n sample.each do |row|\n elem = row[column_name_sym]\n update_frequency_element(frequency_table, elem)\n end\n end\n length = sample.count.to_f\n frequency_table.map { |key, value| value / length }\n end",
"def aggregations\n @aggregations ||= AggregationSet.new\n end",
"def mean\n @sum / @count\n end",
"def aggregation(*args, &block)\n @aggregations ||= AggregationsCollection.new\n @aggregations.update args.first => Aggregation.new(*args, &block)\n self\n end",
"def max(args)\n col = column field: args[:field]\n col.map! {|item| item = item.to_f} \n col.max\n end",
"def total_for(column, only_visible=true)\n ################\n # Smile specific : cache by only_visible AND column + COMPOSITE columns\n return nil unless column\n\n #-----------------------------\n # Smile specific : debug trace\n debug = nil\n if self.respond_to?('debug')\n debug = self.debug\n end\n\n @total_for_by_column ||= {true => {}, false => {}}\n\n if @total_for_by_column[only_visible][column.name]\n logger.debug \"==>prof #{@indent_spaces}from CACHE total_for(#{column.name}, ...#{', NOT only_visible' unless only_visible})\" if debug\n\n return @total_for_by_column[only_visible][column.name]\n end\n\n # TODO add hook for total_for_bar\n if self.respond_to?('total_for_bar')\n total_for_bar(column, only_visible, @total_for_by_column, :total_for)\n end\n\n if @total_for_by_column[only_visible][column.name]\n return @total_for_by_column[only_visible][column.name]\n end\n # END -- Smile specific : cache by only_visible AND column + COMPOSITE columns\n #######################\n\n if debug\n start = Time.now\n\n logger.debug \" =>prof\"\n logger.debug \"\\\\=>prof #{@indent_spaces}total_for(#{column.name}#{', NOT only_visible' unless only_visible}) NATIVE\"\n end\n\n the_scope = base_scope\n\n ################\n # Smile specific #994 Budget and Remaining enhancement\n if respond_to?(:joins_additionnal)\n # Smile comment : does NOT need joins for order_option here\n the_scope = the_scope.joins( joins_additionnal(nil) )\n end\n # END -- Smile specific #994 Budget and Remaining enhancement\n #######################\n\n ################\n # Smile specific : + param only_visible\n @total_for_by_column[only_visible][column.name] = total_with_scope(column, the_scope, only_visible)\n\n #-----------------------------\n # Smile specific : debug trace\n if debug\n logger.debug \"/=>prof #{@indent_spaces}total_for(#{column.name}#{', NOT only_visible' unless only_visible}) -- #{format_duration(Time.now - start, true)}\"\n end\n\n ################\n # Smile specific : cache\n @total_for_by_column[only_visible][column.name]\n end",
"def aggregate_f(*args)\n aggregate_function.f(*args)\n end",
"def value\n {\n start_time: @start_time,\n count: @count,\n sum: @sum,\n max: @max,\n min: @min,\n sum_of_squared_deviation: @sum_of_squared_deviation,\n buckets: @buckets,\n bucket_counts: @bucket_counts\n }\n end",
"def col_sum(col_num)\r\n\t\tsum = 0\r\n\t\tself.get_column(col_num).each do |x|\r\n\t\t\tsum += x\r\n\t\tend\r\n\t\tsum\r\n\tend",
"def aggregate(array, type = 'normal')\n raise \"Need Array not #{array.class}\" unless array.class == Array\n raise 'Two Arrays are not the same size' unless size == array.size\n\n case type\n when 'normal'\n return aggregate_normal(array)\n when 'max'\n return aggregate_max(array)\n when 'min'\n return aggregate_min(array)\n when 'avg'\n return aggregate_avg(array)\n when 'median'\n return aggregate_median(array)\n end\n end",
"def total_value(attribute = nil)\n per_unit = block_given? ? yield : @node.public_send(attribute)\n per_unit * @number_of_units\n end",
"def cost_per_person\n self.total_cost / @group\n end",
"def average(attribute_name, options = {})\n calculate(:avg, attribute_name, options)\n end",
"def calc_metric(data)\n metric = 0\n if data\n ALL_FIELDS.each do |field|\n next unless data[field]\n\n # This fixes the double-counting of created records.\n if field.to_s =~ /^(\\w+)_versions$/\n data[field] -= data[Regexp.last_match(1)] || 0\n end\n metric += FIELD_WEIGHTS[field] * data[field]\n end\n metric += data[:languages].to_i\n metric += data[:bonuses].to_i\n data[:metric] = metric\n end\n metric\n end",
"def total_sum\n rows.collect(&:sum_with_vat).sum\n end",
"def column_value_decimal\n column_value.to_f.to_s\n end",
"def total\n votes.sum(:value)\n end",
"def value\n @collection.sum\n end",
"def summarize_field(records, field)\n extracted = Hash.new(0)\n records.each do |record|\n begin\n value = field.split(\".\").inject(record) { |hash, key| hash[key] }\n rescue\n value = 'no such field'\n end\n extracted[value] += 1\n end\n return extracted\nend",
"def value()\n total = self.unique_transactions().values.inject(0, &:+)\n\n self.counts.values.each do |a|\n total += a[\"total\"]\n end\n\n total\n end",
"def apply(_aggregate)\n raise NotImplementedError\n end",
"def score_sql\n 'SUM(score) AS score'\n end",
"def sum_values\n points.sum\n end"
] | [
"0.6312181",
"0.5965439",
"0.5858012",
"0.58323276",
"0.57973707",
"0.57532007",
"0.5661123",
"0.5625472",
"0.55592465",
"0.5552942",
"0.55311996",
"0.54411584",
"0.5381846",
"0.536721",
"0.5345193",
"0.52855587",
"0.52511686",
"0.52275676",
"0.5203544",
"0.51965743",
"0.5191134",
"0.5178399",
"0.5173893",
"0.51612914",
"0.51204044",
"0.50676554",
"0.5041185",
"0.5020532",
"0.49831152",
"0.49694818",
"0.49692962",
"0.49455348",
"0.49336222",
"0.48698252",
"0.48407555",
"0.4826382",
"0.48045403",
"0.47991362",
"0.4798203",
"0.47811744",
"0.4775124",
"0.4773287",
"0.47726405",
"0.47669533",
"0.47598034",
"0.47396109",
"0.47376215",
"0.4726321",
"0.4722351",
"0.47214472",
"0.4718122",
"0.47157273",
"0.47035667",
"0.46972927",
"0.46878707",
"0.46669364",
"0.46337506",
"0.4628738",
"0.46195817",
"0.45981863",
"0.4597929",
"0.45976678",
"0.45928133",
"0.45862702",
"0.45726722",
"0.45638695",
"0.45593736",
"0.45496702",
"0.4545963",
"0.4544785",
"0.45382679",
"0.45351705",
"0.45329025",
"0.45228714",
"0.45161363",
"0.45158687",
"0.45095792",
"0.45069388",
"0.4497932",
"0.44900408",
"0.448648",
"0.4485855",
"0.4482925",
"0.44728315",
"0.4472426",
"0.44693956",
"0.44671422",
"0.44666332",
"0.44657815",
"0.4457307",
"0.4454392",
"0.44536114",
"0.44464856",
"0.44459325",
"0.44432747",
"0.4437823",
"0.44332576",
"0.44313753",
"0.44290206",
"0.44249693"
] | 0.6134351 | 1 |
Use pluck as a shortcut to select one or more attributes without loading an entire record object per row. Person.pluck(:name) instead of Person.all.map(&:name) Pluck returns an Array of attribute values typecasted to match the plucked column names, if they can be deduced. Plucking an SQL fragment returns String values by default. Person.pluck(:name) SELECT people.name FROM people => ['David', 'Jeremy', 'Jose'] Person.pluck(:id, :name) SELECT people.id, people.name FROM people => [[1, 'David'], [2, 'Jeremy'], [3, 'Jose']] Person.distinct.pluck(:role) SELECT DISTINCT role FROM people => ['admin', 'member', 'guest'] Person.where(age: 21).limit(5).pluck(:id) SELECT people.id FROM people WHERE people.age = 21 LIMIT 5 => [2, 3] Person.pluck(Arel.sql('DATEDIFF(updated_at, created_at)')) SELECT DATEDIFF(updated_at, created_at) FROM people => ['0', '27761', '173'] See also ids. | def pluck(*column_names)
return [] if @none
if loaded? && all_attributes?(column_names)
result = records.pluck(*column_names)
if @async
return Promise::Complete.new(result)
else
return result
end
end
if has_include?(column_names.first)
relation = apply_join_dependency
relation.pluck(*column_names)
else
klass.disallow_raw_sql!(column_names.flatten)
columns = arel_columns(column_names)
relation = spawn
relation.select_values = columns
result = skip_query_cache_if_necessary do
if where_clause.contradiction?
ActiveRecord::Result.empty(async: @async)
else
klass.connection.select_all(relation.arel, "#{klass.name} Pluck", async: @async)
end
end
result.then do |result|
type_cast_pluck_values(result, columns)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pluck(column_name)\n merge_array([\"$pluck\", [column_name.to_s]])\n map { |r| r.send(column_name.to_sym) }\n end",
"def pluck(name)\n map { |t| t[name] }\n end",
"def pluck(*columns)\n fail ArgumentError, 'No columns specified for Query#pluck' if columns.size.zero?\n\n query = return_query(columns)\n columns = query.response.columns\n\n if columns.size == 1\n column = columns[0]\n query.map { |row| row[column] }\n else\n query.map { |row| columns.map { |column| row[column] } }\n end\n end",
"def pluck( field_name )\n\n end",
"def pluck_instances(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_instance(options).pluck(*cols)\n end",
"def pluck(&block)\n scope.pluck(*build_attributes(true, &block))\n end",
"def pluck(arr, prop)\n result = []\n arr.each do |n|\n result << n[prop]\n end\n result\nend",
"def pluck(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row }\n result\n end",
"def async_pluck(*column_names)\n async.pluck(*column_names)\n end",
"def pluck(ary, key)\n # your implementation here\n ary.map {|item| item[key]}\nend",
"def pluck_rows(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_row(options).pluck(*cols)\n end",
"def pluck(coll, attr_name)\n coll.map{|ct| ct[attr_name]}.uniq.compact\n end",
"def pick(*column_names)\n if loaded? && all_attributes?(column_names)\n result = records.pick(*column_names)\n return @async ? Promise::Complete.new(result) : result\n end\n\n limit(1).pluck(*column_names).then(&:first)\n end",
"def pluck(attribute)\n @legs.pluck(attribute).compact\n end",
"def pluck(*fields)\n fields = fields.flatten(1).reject(&:blank?).map(&:to_s)\n\n source_fields = fields - EVERFIELDS\n scope = except(FIELD_STORAGES, EXTRA_STORAGES)\n .source(source_fields.presence || false)\n\n hits = raw_limit_value ? scope.hits : scope.scroll_hits(batch_size: DEFAULT_PLUCK_BATCH_SIZE)\n hits.map do |hit|\n if fields.one?\n fetch_field(hit, fields.first)\n else\n fields.map do |field|\n fetch_field(hit, field)\n end\n end\n end\n end",
"def pluck(*keys)\n keys = keys.flatten\n map {|h| h.pluck(keys)}\n # if keys.count > 1\n # map {|h| keys.map {|k| h[k]}}\n # else\n # map {|h| h[keys.first]}\n # end\n end",
"def pluck(field)\n\t\t\tif self.general_info.include? field\n\t\t\t\tself.result.map{ |p| p[field] }\n\t\t\telse\n\t\t\t\tself.result.map{ |p| p['participant_information'].select{ |a| a['desc'] == field } }.flatten.map{ |x| x['info'] }\n\t\t\tend\n\t\tend",
"def collection\n\n\n\n if !options[:collection] && column\n\n\n\n pluck_column\n\n\n\n else\n\n\n\n super\n\n\n\n end\n\n\n\n end",
"def pluck_one(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row.first }\n result\n end",
"def pluck_unique(column_name, results = last_results)\n results.map {|r| r[column_name]}.uniq\nend",
"def pluck(ary, key)\n found_value = Array.new\n ary[0].fetch(key)\n ary.each {|item| found_value << item.fetch(key)}\n found_value\nend",
"def test_pluck\n store_names [\"Product A\", \"Product B\"]\n assert_equal [\"Product A\", \"Product B\"], Product.search(\"product\").pluck(:name).sort\n end",
"def async_pick(*column_names)\n async.pick(*column_names)\n end",
"def select(sql, name = nil)\n fields, rows = select_raw(sql, name)\n result = []\n for row in rows\n row_hash = {}\n fields.each_with_index do |f, i|\n val = row[i]\n row_hash[f.to_s] = val.respond_to?(:rstrip) ? val.rstrip : val\n end\n result << row_hash\n end\n result\n end",
"def select_col(col)\n select(col).all.map { |row| row[col]}\n end",
"def _pick_attribute(names)\n [names].flatten.each do |name|\n return self.send(name) if self.respond_to?(name)\n end\n return nil\n end",
"def batch_pluck(type:, query: {}, fields:, config: {}, &blk)\n return to_enum(__callee__, type: type, query: query, fields: fields, config: config) unless block_given?\n\n # If the storage adapter implements batch_pluck, delegate\n if policy_machine_storage_adapter.respond_to?(:batch_pluck)\n policy_machine_storage_adapter.batch_pluck(type, query: query, fields: fields, config: config, &blk)\n else\n Warn.once(\"WARNING: batch_pluck is not implemented for storage adapter #{policy_machine_storage_adapter.class}\")\n results = batch_find(type: type, query: query, config: config) do |batch|\n yield batch.map { |elt| convert_pe_to_fields(elt, fields) }\n end\n end\n end",
"def attribute_values \n columns = self.class.columns\n columns.map { |col| self.send(col) } #=> ex. [:id, :name, :owner_id]\n end",
"def liked_users \n users = object.liked_users.collect do |liked_user|\n # user = User.find_by(id: liked_user.liked_user_id)\n # user.attributes.except!(\"created_at\", :updated_at, :email)\n user = User.select(:id, :name, :gender, :age, :bio).find_by(id: liked_user.liked_user_id)\n end\n end",
"def pluck(hashes, key)\n result = []\n for hash in hashes\n #{a:1}[a]\n result << hash[key]\n end\n result\nend",
"def _select_map_single\n rows = []\n clone(:_sequel_pg_type=>:first).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def select(*columns)\n @options[:select] = \"SPECIFIC_ATTRIBUTES\"\n @options[:attributes_to_get] = columns.map(&:to_s)\n self\n end",
"def _select_map_multiple(ret_cols)\n rows = []\n clone(:_sequel_pg_type=>:array).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def pluck selector = {}, &block\n quantity = (selector.delete :quantity).to_i\n if blocks?\n unless (result = find_by selector, &block).empty?\n result = result[0..(quantity - 1)] if quantity > 0\n result.each {|b| b.set_attr 'skip-option', '' }\n end\n else\n result = []\n end\n quantity == 1 ? result[0] : result\n end",
"def attribute_values\n # call Array#map on SQLObject::columns, call send on the instance to \n # get the value\n self.class.columns.map { |attribute| self.send(attribute) }\n end",
"def find_by_query_columns\n raise \"override in model as find_by_query_columns, e.g. return ['name', 'description']\"\n end",
"def with_pseudo_columns(*attributes)\n attributes.blank? ? scoped : select(with_pseudo_columns_sql(*attributes))\n end",
"def with_pseudo_columns(*attributes)\n attributes.blank? ? scoped : select(with_pseudo_columns_sql(*attributes))\n end",
"def select(*columns)\n clone(:select => columns)\n end",
"def get_attributes(column, column_type, klass, options); end",
"def attributes\n query[ model_name ]\n end",
"def arel\n users = User.arel_table\n users.where(users[:name].eq(@name))\n .project(users[:email])\n end",
"def attribute_values\n self.class.columns.map { |column| send(column) }\n end",
"def select\n (@select ||= (defaults[:select] || [])).collect { |c| c == '*' ? all_columns.keys : c }.flatten\n end",
"def select_values(sql, name = nil)\n result = select_rows(sql, name)\n result.map { |v| v[0] }\n end",
"def to_a\n columns.map { |column| @attributes[column.name.underscore] }\n end",
"def select(sql, name = nil, binds = [])\n ret = exec_query(sql, name, binds)\n ActiveRecord::VERSION::MAJOR >= 4 ? ret : ret.to_a\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def select_all(arel, name = nil, binds = NO_BINDS, preparable: nil, async: false)\n binds = convert_legacy_binds_to_attributes(binds) if binds.first.is_a?(Array)\n super\n end",
"def select(*args)\n Criteria.new(self).select(*args)\n end",
"def get_patient\n @patient = Patient.pluck(:id,:name)\n render json: @patient\n end",
"def as_select_values\n User::table.select(where:\"options NOT LIKE '0%'\", colonnes:[:pseudo]).collect do |uid, udata|\n [uid, udata[:pseudo]]\n end\n end",
"def complete_name_list\n #User.select(:name).map{|user_record| user_record.name}\n end",
"def retrieve_name_columns\n fetch_columns.grep /name/\n end",
"def attribute_values\n self.class.columns.map { |col| self.send(col) }\n end",
"def select(sql, name = nil, binds = [])\n fields, rows = select_raw(sql, name)\n rows.map do |row|\n Hash[*fields.zip(row).flatten]\n end\n end",
"def select(*props)\n @query[:select] = props.join(',')\n self\n end",
"def raw cols\n ret = get_values [*cols]\n\n case cols\n when Array\n ret\n else\n ret.first\n end\n end",
"def from_activerecord(relation, *fields)\n fields = relation.klass.column_names if fields.empty?\n fields = fields.map(&:to_sym)\n\n result = relation.pluck(*fields).transpose\n Daru::DataFrame.new(result, order: fields).tap(&:update)\n end",
"def attribute_values\n self.class.columns.map { |attr| self.send(attr) }\n end",
"def raws cols\n ret = get_values [*cols], true\n\n case cols\n when Array\n ret\n else\n ret.first\n end\n end",
"def query_columns\n explicit_columns\n end",
"def select_rows(sql, name = nil)\n # last parameter indicates to return also column list\n result, columns = select(sql, name, true)\n result.map{ |v| columns.map{|c| v[c]} }\n end",
"def selects_all_female_bears_return_name_and_age\n 'SELECT bears.name, bears.age FROM bears WHERE gender = \"F\"'\nend",
"def attributes_to_sql(record)\n attributes.map { |attr| attribute_to_sql(attr, record) }\n end",
"def to_a\n @table.columns.map { |col| @attributes[col] }\n end",
"def select(fields)\n return SkyDB::Query.new(:client => self).select(fields)\n end",
"def sql_members\n members.map{|attr| \"`#{attr}`\" }.join(\", \")\n end",
"def read_attribute(attr_name)\n return unless attr_name\n\n attr_name = attr_name.to_s\n methods = self.class.generated_external_attribute_methods\n\n if methods.method_defined?(attr_name)\n if @attributes.has_key?(attr_name) || attr_name == 'id'\n methods.send(attr_name, @attributes[attr_name], @attributes, @attributes_cache, attr_name)\n end\n elsif !self.class.attribute_methods_generated?\n # If we haven't generated the caster methods yet, do that and\n # then try again\n self.class.define_attribute_methods\n read_attribute(attr_name)\n else\n # If we get here, the attribute has no associated DB column, so\n # just return it verbatim.\n @attributes[attr_name]\n end\n end",
"def columns\n select.inject([]) do |columns, column|\n columns << {\n :type => all_columns[column][:type]\n }.merge({\n :id => column.to_s,\n :label => column_label(column)\n })\n end\n end",
"def pick_array_field(data, field)\n data.map { |x| x[field] }\n end",
"def select(*args)\n Criteria.new(:all, self).select(*args)\n end",
"def get_nested_objects(self_association_method, pluck_field_sym, class_name)\n nested_objects_ids = self_association_method.pluck(pluck_field_sym)\n nested_objects_ids_string = nested_objects_ids.reduce('(') { |final_string, id| final_string + id.to_s + ','}.chop + ')'\n class_name.where(\"id in #{nested_objects_ids_string}\")\n end",
"def pluck_ids(targets)\n targets.pluck(:id) if targets\n end",
"def people\n Person.find(dogs.map(&:owner_id))\n end",
"def column attribute_name, opts = {}\n klass = opts[:class] || @default_opts[:class] || nil\n raise ArgumentError, 'You must specify a :class option, either explicitly, or using with_opts' if klass.nil?\n\n source_attribute = (opts[:from] || attribute_name).to_s\n\n define_method attribute_name do\n serialized_attrib_names = klass.columns.select {|c| c.cast_type.is_a?(ActiveRecord::Type::Serialized) }.map {|c| c.name.to_s }\n if serialized_attrib_names.include?(source_attribute.to_s)\n return YAML.load(@raw_attributes[attribute_name.to_s])\n end\n\n val = klass.columns_hash[source_attribute].type_cast_from_database(@raw_attributes[attribute_name.to_s])\n\n if val.is_a?(Time) && Time.respond_to?(:zone) && Time.zone.respond_to?(:utc_offset)\n # Adjust UTC times to rails timezone\n val.localtime(Time.zone.utc_offset)\n end\n\n return val\n end\n\n # bit mucky, a lot here that feels like it should be a little method of its own\n select_column = \"#{klass.table_name}.#{source_attribute}\"\n select_column += \" as #{attribute_name}\" if opts[:from]\n (@sql_select_columns ||= []) << select_column\n end",
"def select_all(arel, name = nil, binds = [])\n select(to_sql(arel), name, binds)\n end",
"def fetch_property(name)\n properties.where(\"name = ?\", name).first\n end",
"def ids\n primary_key_array = Array(primary_key)\n\n if loaded?\n result = records.map do |record|\n if primary_key_array.one?\n record._read_attribute(primary_key_array.first)\n else\n primary_key_array.map { |column| record._read_attribute(column) }\n end\n end\n return @async ? Promise::Complete.new(result) : result\n end\n\n if has_include?(primary_key)\n relation = apply_join_dependency.group(*primary_key_array)\n return relation.ids\n end\n\n columns = arel_columns(primary_key_array)\n relation = spawn\n relation.select_values = columns\n\n result = if relation.where_clause.contradiction?\n ActiveRecord::Result.empty\n else\n skip_query_cache_if_necessary do\n klass.connection.select_all(relation, \"#{klass.name} Ids\", async: @async)\n end\n end\n\n result.then { |result| type_cast_pluck_values(result, columns) }\n end",
"def extract_attributes!(attributes)\n active_source_attributes = attributes.inject({}) do |active_source_attributes, column_values|\n active_source_attributes[column_values.first] = attributes.delete(column_values.first) if self.class.column_names.include? column_values.first\n active_source_attributes\n end\n\n [ active_source_attributes, attributes ]\n end",
"def select(*col_names)\n r = @raw\n col_names.collect do |n|\n p = @vprocs[n]\n p ? p.call(r) : nil\n end\n end",
"def select(attributes)\n condition = attributes.collect {|k, v| \"#{k} = #{format_mysql(k, v)}\"}.join(\" AND \")\n results = @@connection.execute(\"SELECT * FROM base WHERE (#{condition});\")\n objects = []\n results.fetch_hash do |row|\n objects << new(row)\n end\n objects\n end",
"def fields\n column_names.map(&:to_sym)\n .reject { |c| [:id, :created_at, :updated_at].include? c }\n end",
"def sql_columns\n \"(#{attributes.keys.join(\", \")})\"\n end",
"def fields(*fields)\n result = []\n keys.sort.each do |name|\n result << self[name].pick(*fields)\n end\n result\n end",
"def gluck; end",
"def select(property_name)\n query_proxy = OData::Model::QueryProxy.new(self)\n query_proxy.select(property_name.to_sym)\n end",
"def backers\n Person.where(id: pledges.pluck(:person_id))\n end",
"def select(&block)\n ary = []\n self.class.members.each{|field|\n val = self[field]\n ary.push(val) if block.call(val)\n }\n ary\n end",
"def favorite_truck_names\n #favorites.pluck(:uname)\n ['go_burger']\n end",
"def method_missing(name, *args, &block)\n fn = name.to_s\n fnne = fn.gsub('=','')\n if (!self.attributes.keys.include?(fnne)) && self.connection.columns(self.class.table_name).map{|c| c.name}.include?(fnne)\n # for next time\n self.class.reset_column_information\n\n # for this time\n if self.new_record?\n self.attributes[fnne] = nil\n else\n self.attributes[fnne] = self.connection.select_all(\"select #{fnne} from #{self.class.table_name} where id = #{self.id}\")[0][fnne] rescue nil\n end\n\n return self.attributes[fnne]\n else\n super\n end\n end",
"def loaded_attributes\n properties.map{|p| p.name if attribute_loaded?(p.name)}.compact\n end",
"def pluck()\n \n for key in user.each_key do\n puts \"#{key}\"\n user[key] = gets.chomp\n end\n user\nend",
"def select\n return self[:select] if include?(:select)\n self[:select] ||= associated_class.table_name.*\n end",
"def mentionables_relation(mentioner, klass, opts = {})\n rel = klass.where(klass.primary_key =>\n self.select(:mentionable_id).\n where(:mentionable_type => klass.name.classify).\n where(:mentioner_type => mentioner.class.to_s).\n where(:mentioner_id => mentioner.id)\n )\n\n if opts[:pluck]\n rel.pluck(opts[:pluck])\n else\n rel\n end\n end",
"def columns\n single_record unless @columns\n @columns || []\n end",
"def query_return_array(sql, *binds)\n mysql.fetch(sql, *binds).all\n end",
"def to_a\n @to_a ||= @columns.map { |column| init_attribute(column) }\n end",
"def select_all(arel, name = nil, binds = [])\n add_column_types(select(to_sql(arel, binds), name, binds))\n end",
"def fat_record_select\n select = 'semantic_relations.id AS id, semantic_relations.created_at AS created_at, '\n select << 'semantic_relations.updated_at AS updated_at, '\n select << 'object_id, object_type, subject_id, predicate_uri, '\n select << 'obj_props.created_at AS property_created_at, '\n select << 'obj_props.updated_at AS property_updated_at, '\n select << 'obj_props.value AS property_value, '\n select << 'obj_sources.created_at AS object_created_at, '\n select << 'obj_sources.updated_at AS object_updated_at, obj_sources.type AS object_realtype, '\n select << 'obj_sources.uri AS object_uri, '\n select << 'subject_sources.uri AS subject_uri'\n select\n end"
] | [
"0.73665416",
"0.73337096",
"0.7298219",
"0.7228862",
"0.7130534",
"0.69548666",
"0.68428504",
"0.67168826",
"0.65780044",
"0.6573343",
"0.646296",
"0.63349944",
"0.6244439",
"0.6131515",
"0.60042065",
"0.5932946",
"0.58840156",
"0.5862882",
"0.5838078",
"0.57468736",
"0.5649879",
"0.55376023",
"0.53530383",
"0.5198877",
"0.51976186",
"0.5185764",
"0.518047",
"0.5133124",
"0.50933903",
"0.5084753",
"0.50817627",
"0.50052106",
"0.49730346",
"0.49578434",
"0.4931202",
"0.48991945",
"0.48815042",
"0.48815042",
"0.48431343",
"0.48371974",
"0.48175085",
"0.4797726",
"0.4767199",
"0.47476855",
"0.47476846",
"0.47414777",
"0.47347364",
"0.47269347",
"0.4722954",
"0.4719646",
"0.47037095",
"0.46937308",
"0.4693217",
"0.46874627",
"0.46824044",
"0.46742687",
"0.467406",
"0.46494672",
"0.46450555",
"0.46336883",
"0.4627931",
"0.4627183",
"0.46258152",
"0.45948696",
"0.45810848",
"0.45791918",
"0.4577034",
"0.45722598",
"0.4567794",
"0.45665133",
"0.4565156",
"0.45619166",
"0.45613533",
"0.45500058",
"0.45489982",
"0.45411336",
"0.45357057",
"0.45279175",
"0.4527051",
"0.45225564",
"0.45180678",
"0.4514697",
"0.45087627",
"0.4504213",
"0.45003298",
"0.44975677",
"0.44938454",
"0.44911185",
"0.44905573",
"0.44843557",
"0.4478778",
"0.44673756",
"0.44593105",
"0.44509235",
"0.44459862",
"0.4444538",
"0.44384316",
"0.4429195",
"0.44127455",
"0.44062248"
] | 0.7516116 | 0 |
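A minimal usage sketch of the pluck API documented in the row above, under stated assumptions: a Rails application with a hypothetical Person model (a people table holding name, role and age columns) and an established ActiveRecord connection; the result values in the comments simply restate the examples from the doc text, not real query output.

# Instantiates a full ActiveRecord object per row just to read one attribute:
names_via_map = Person.all.map(&:name)

# pluck issues a single SELECT for the requested columns and returns
# type-cast values without building model instances:
names         = Person.pluck(:name)       # => ["David", "Jeremy", "Jose"]
id_name_pairs = Person.pluck(:id, :name)  # => [[1, "David"], [2, "Jeremy"], [3, "Jose"]]

# It composes with ordinary relation methods:
roles = Person.distinct.pluck(:role)
ids   = Person.where(age: 21).limit(5).pluck(:id)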
Same as pluck but performs the query asynchronously and returns an ActiveRecord::Promise | def async_pluck(*column_names)
async.pluck(*column_names)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pluck(*column_names)\n return [] if @none\n\n if loaded? && all_attributes?(column_names)\n result = records.pluck(*column_names)\n if @async\n return Promise::Complete.new(result)\n else\n return result\n end\n end\n\n if has_include?(column_names.first)\n relation = apply_join_dependency\n relation.pluck(*column_names)\n else\n klass.disallow_raw_sql!(column_names.flatten)\n columns = arel_columns(column_names)\n relation = spawn\n relation.select_values = columns\n result = skip_query_cache_if_necessary do\n if where_clause.contradiction?\n ActiveRecord::Result.empty(async: @async)\n else\n klass.connection.select_all(relation.arel, \"#{klass.name} Pluck\", async: @async)\n end\n end\n result.then do |result|\n type_cast_pluck_values(result, columns)\n end\n end\n end",
"def pick(*column_names)\n if loaded? && all_attributes?(column_names)\n result = records.pick(*column_names)\n return @async ? Promise::Complete.new(result) : result\n end\n\n limit(1).pluck(*column_names).then(&:first)\n end",
"def async_pick(*column_names)\n async.pick(*column_names)\n end",
"def pluck(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row }\n result\n end",
"def pluck(&block)\n scope.pluck(*build_attributes(true, &block))\n end",
"def pluck_one(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row.first }\n result\n end",
"def pluck(*columns)\n fail ArgumentError, 'No columns specified for Query#pluck' if columns.size.zero?\n\n query = return_query(columns)\n columns = query.response.columns\n\n if columns.size == 1\n column = columns[0]\n query.map { |row| row[column] }\n else\n query.map { |row| columns.map { |column| row[column] } }\n end\n end",
"def pluck(column_name)\n merge_array([\"$pluck\", [column_name.to_s]])\n map { |r| r.send(column_name.to_sym) }\n end",
"def pluck(ary, key)\n found_value = Array.new\n ary[0].fetch(key)\n ary.each {|item| found_value << item.fetch(key)}\n found_value\nend",
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def async_lookup(args)\n unless (Hash === args)\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all do |rows|\n yield(rows.any? ? rows.first : nil)\n end\n\n return\n end",
"def ids\n primary_key_array = Array(primary_key)\n\n if loaded?\n result = records.map do |record|\n if primary_key_array.one?\n record._read_attribute(primary_key_array.first)\n else\n primary_key_array.map { |column| record._read_attribute(column) }\n end\n end\n return @async ? Promise::Complete.new(result) : result\n end\n\n if has_include?(primary_key)\n relation = apply_join_dependency.group(*primary_key_array)\n return relation.ids\n end\n\n columns = arel_columns(primary_key_array)\n relation = spawn\n relation.select_values = columns\n\n result = if relation.where_clause.contradiction?\n ActiveRecord::Result.empty\n else\n skip_query_cache_if_necessary do\n klass.connection.select_all(relation, \"#{klass.name} Ids\", async: @async)\n end\n end\n\n result.then { |result| type_cast_pluck_values(result, columns) }\n end",
"def pluck(name)\n map { |t| t[name] }\n end",
"def pluck( field_name )\n\n end",
"def pluck_instances(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_instance(options).pluck(*cols)\n end",
"def pluck(ary, key)\n # your implementation here\n ary.map {|item| item[key]}\nend",
"def _select_map_single\n rows = []\n clone(:_sequel_pg_type=>:first).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def query_return_array(sql, *binds)\n mysql.fetch(sql, *binds).all\n end",
"def fetch(sql, *params)\n rs = self.execute(sql, *params)\n self.execute(\"flush privileges\") # Always flush in case user wants to alter users\n return [] if self.interpreter.preview? && ! rs\n return rs.fetch_all rescue nil\n end",
"def pluck_unique(column_name, results = last_results)\n results.map {|r| r[column_name]}.uniq\nend",
"def pluck(arr, prop)\n result = []\n arr.each do |n|\n result << n[prop]\n end\n result\nend",
"def pluck(*fields)\n fields = fields.flatten(1).reject(&:blank?).map(&:to_s)\n\n source_fields = fields - EVERFIELDS\n scope = except(FIELD_STORAGES, EXTRA_STORAGES)\n .source(source_fields.presence || false)\n\n hits = raw_limit_value ? scope.hits : scope.scroll_hits(batch_size: DEFAULT_PLUCK_BATCH_SIZE)\n hits.map do |hit|\n if fields.one?\n fetch_field(hit, fields.first)\n else\n fields.map do |field|\n fetch_field(hit, field)\n end\n end\n end\n end",
"def to_a\n fetch(ids)\n end",
"def to_a\n fetch(ids)\n end",
"def pluck_rows(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_row(options).pluck(*cols)\n end",
"def result\n ActiveRecord::Base.connection.select_all(sql).entries\n end",
"def batch_pluck(type:, query: {}, fields:, config: {}, &blk)\n return to_enum(__callee__, type: type, query: query, fields: fields, config: config) unless block_given?\n\n # If the storage adapter implements batch_pluck, delegate\n if policy_machine_storage_adapter.respond_to?(:batch_pluck)\n policy_machine_storage_adapter.batch_pluck(type, query: query, fields: fields, config: config, &blk)\n else\n Warn.once(\"WARNING: batch_pluck is not implemented for storage adapter #{policy_machine_storage_adapter.class}\")\n results = batch_find(type: type, query: query, config: config) do |batch|\n yield batch.map { |elt| convert_pe_to_fields(elt, fields) }\n end\n end\n end",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def find_all(&block)\r\n copy_and_return(@records.select(&block))\r\n end",
"def query_wait sql, waiting_time = 10\n result = db[sql].all\n if result.empty?\n if waiting_time != 0\n sleep 1\n result = query_wait(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def fetch_all\n self.to_a\n end",
"def contacts\n Person.find_by_sql(contact_query(\"people.id, people.created_at\"))\n end",
"def select(sql)\n raise(ArgumentError, \"Bad sql parameter\") unless sql.kind_of?(String)\n\n client = ensure_connected\n\n Pod4.logger.debug(__FILE__){ \"select: #{sql}\" }\n query = client.execute(sql)\n\n rows = []\n query.each do |r| \n\n if block_given? \n rows << yield(r)\n else\n rows << r\n end\n\n end\n\n query.cancel \n rows\n\n rescue => e\n handle_error(e)\n end",
"def select_values(sql, name = nil)\n result = select_rows(sql, name)\n result.map { |v| v[0] }\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def retrieve_from_database\n query = \"SELECT * FROM #{@_table} WHERE #{@_pkey} = #{@_pkey_id}\"\n result = Taupe::Database.fetch(query, true)\n\n return nil if result.nil? || result.empty?\n\n result.each do |k, v|\n @_values[k.to_sym] = v if k.is_a? Symbol\n end\n\n Taupe::Cache.set @_cache_key, @_values unless @_cache_key.nil?\n end",
"def pluck selector = {}, &block\n quantity = (selector.delete :quantity).to_i\n if blocks?\n unless (result = find_by selector, &block).empty?\n result = result[0..(quantity - 1)] if quantity > 0\n result.each {|b| b.set_attr 'skip-option', '' }\n end\n else\n result = []\n end\n quantity == 1 ? result[0] : result\n end",
"def _select_map_multiple(ret_cols)\n rows = []\n clone(:_sequel_pg_type=>:array).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def prefetch_rows\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def pluck_message\n pluck_queue_message(name)\n end",
"def pluck(field)\n\t\t\tif self.general_info.include? field\n\t\t\t\tself.result.map{ |p| p[field] }\n\t\t\telse\n\t\t\t\tself.result.map{ |p| p['participant_information'].select{ |a| a['desc'] == field } }.flatten.map{ |x| x['info'] }\n\t\t\tend\n\t\tend",
"def pull_records(value)\n begin\n column = match_column(value) # determine which column contains the specified value\n unless column == \"\"\n results = [] # array to hold all matching hashes\n conn = open_db()\n query = \"select *\n from details\n join numbers on details.id = numbers.details_id\n join quotes on details.id = quotes.details_id\n where \" + column + \" ilike $1\n order by name\"\n conn.prepare('q_statement', query)\n rs = conn.exec_prepared('q_statement', [\"%\" + value + \"%\"])\n conn.exec(\"deallocate q_statement\")\n rs.each { |result| results.push(result) }\n return results\n else\n return [{\"quote\" => \"No matching record - please try again.\"}]\n end\n rescue PG::Error => e\n puts 'Exception occurred'\n puts e.message\n ensure\n conn.close if conn\n end\nend",
"def query sql\n result = db[sql].all\n return result\n end",
"def query_empty sql, waiting_time = 10\n result = db[sql].all\n if !result.empty?\n if waiting_time != 0\n sleep 1\n result = query(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def my_select(&prc)\n result_array = []\n self.my_each {|el| result_array << el if prc.call(el)}\n result_array\n end",
"def results\n @scope.where(@scope.primary_key => @ids).to_a\n end",
"def recipes\n db_connection do |conn|\n conn.exec(\"SELECT * FROM recipes\").to_a\n end\nend",
"def fetch_records(ids)\n model.where(id: ids)\n end",
"def query_single(sql, *params)\n results = run(sql, *params)\n results.each(as: :array, :first => true).first\n end",
"def lazy_select\n lazify.call(S.select)\n end",
"def pluck(attribute)\n @legs.pluck(attribute).compact\n end",
"def parallel_queries\n raise \"Model is missing id column\" if ar_class.columns.none? { |column| column.name == \"id\" }\n\n if ar_class.respond_to?(:scrubbable_scope)\n num_records = ar_class.send(:scrubbable_scope).count\n else\n num_records = ar_class.count\n end\n return [] if num_records == 0 # no records to import\n\n record_window_size, modulus = num_records.divmod(num_of_batches)\n if record_window_size < 1\n record_window_size = 1\n modulus = 0\n end\n\n start_id = next_id(ar_class: ar_class, offset: 0)\n queries = num_of_batches.times.each_with_object([]) do |_, queries|\n next unless start_id\n\n end_id = next_id(ar_class: ar_class, id: start_id, offset: record_window_size - 1)\n if modulus > 0\n end_id = next_id(ar_class: ar_class, id: end_id)\n modulus -= 1\n end\n queries << { id: start_id..end_id } if end_id\n start_id = next_id(ar_class: ar_class, id: end_id || start_id)\n end\n\n # just in case new records are added since we started, extend the end ID\n queries[-1] = [\"#{ar_class.quoted_table_name}.id >= ?\", queries[-1][:id].begin] if queries.any?\n\n queries\n end",
"def fetch\n row = @result.fetch\n return row unless @bind_result\n row.zip(@bind_result).map do |col, type|\n if col.nil?\n nil\n elsif [Numeric, Integer, Fixnum].include? type\n col.to_i\n elsif type == String\n col.to_s\n elsif type == Float && !col.is_a?(Float)\n col.to_i.to_f\n elsif type == Mysql::Time && !col.is_a?(Mysql::Time)\n if col.to_s =~ /\\A\\d+\\z/\n i = col.to_s.to_i\n if i < 100000000\n y = i/10000\n m = i/100%100\n d = i%100\n h, mm, s = 0\n else\n y = i/10000000000\n m = i/100000000%100\n d = i/1000000%100\n h = i/10000%100\n mm= i/100%100\n s = i%100\n end\n if y < 70\n y += 2000\n elsif y < 100\n y += 1900\n end\n Mysql::Time.new(y, m, d, h, mm, s)\n else\n Mysql::Time.new\n end\n else\n col\n end\n end\n end",
"def query(&block)\n items = assert_connected(table).query(&block)\n results = []\n items.each { |i| results << new(i) }\n results\n end",
"def fetch_records_field(record_class:, record_ids:, field:)\n output = []\n record_ids.each do |record_id|\n key = get_key_for_record_id(record_id: record_id, record_class: record_class)\n data = REDIS_APP_JOIN.hget(key, field)\n output << data if data # checks if nil\n end\n return output.uniq\n end",
"def select(sql, name = nil, binds = [])\n ret = exec_query(sql, name, binds)\n ActiveRecord::VERSION::MAJOR >= 4 ? ret : ret.to_a\n end",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def get_sighting_records(db)\r\n\r\n sighting_records = db.query(\"select * from sighting_details order by id\")\r\n\r\n return sighting_records.to_a\r\n\r\nend",
"def get_all_from_database\n model.all\n end",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def comments\n db_connection do |conn|\n conn.exec(\"SELECT * FROM comments\").to_a\n end\nend",
"def getfieldvalues\n #instantiate the Array here to be sure it is clear of values before populating\n @fieldvalues = Array.new\n @db.execute(\"select distinct(#{@fieldname}) from data\") do |value|\n v=value[0].to_s\n @fieldvalues << value[0].to_s\n end\nend",
"def pluck(hashes, key)\n result = []\n for hash in hashes\n #{a:1}[a]\n result << hash[key]\n end\n result\nend",
"def get_all_actors\n query = %Q{\n SELECT * FROM actors\n ORDER BY name\n }\n\n results = db_connection do |conn|\n conn.exec(query)\n end\n\n results.to_a\nend",
"def get_data\n\t\texecute unless @result\n\t\treturn get_data_from_result(@result)\n\tend",
"def promise_all\n _class_fetch_states[:all] = 'i'\n _promise_get(\"#{resource_base_uri}.json?timestamp=#{`Date.now() + Math.random()`}\").then do |response|\n collection = _convert_array_to_collection(response.json[self.to_s.underscore.pluralize])\n _class_fetch_states[:all] = 'f'\n _notify_class_observers\n warn_message = \"#{self.to_s}.all has been called. This may potentially load a lot of data and cause memory and performance problems.\"\n `console.warn(warn_message)`\n collection\n end.fail do |response|\n error_message = \"#{self.to_s}.all failed to fetch records!\"\n `console.error(error_message)`\n response\n end\n end",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def pluck(*keys)\n keys = keys.flatten\n map {|h| h.pluck(keys)}\n # if keys.count > 1\n # map {|h| keys.map {|k| h[k]}}\n # else\n # map {|h| h[keys.first]}\n # end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def query_data_from_db(num_workers, restart)\n restart_cond = restart ? 'WHERE random IS NULL' : ''\n\n connection.exec <<-SQL\n SELECT array_agg(rand.id) FROM (\n SELECT id, id % #{num_workers} AS worker_id\n FROM randoms #{restart_cond}\n ) rand GROUP BY rand.worker_id;\n SQL\n end",
"def query_result_set(query)\n\t result = ValueSet.new\n\t call(:query_result_set, query) do |marshalled_set|\n\t\tfor task in marshalled_set\n\t\t task = local_object(task)\n\t\t Distributed.keep.ref(task)\n\t\t result << task\n\t\tend\n\t end\n\n\t result\n\tend",
"def load_worker_array\n @worker_array = Worker.where(active: true).order(last_name: :asc).map { |worker_array| [worker_array.full_name, worker_array.badge_nbr]}\nend",
"def pending_sent_friend_requests\n friend_uids = []\n friend_uids = friend_uids + self.sent_friend_requests.pluck(:reciever_uid)\n return User.where(:uid => friend_uids)\nend",
"def all \n results = CONNECTION.execute(\"SELECT * FROM #{self.table_name}\")\n \n return self.results_as_objects(results)\n end",
"def select(*col_names)\n r = @raw\n col_names.collect do |n|\n p = @vprocs[n]\n p ? p.call(r) : nil\n end\n end",
"def getify_array(array_of_records)\n return array_of_records.map{|r| r.get}\n end",
"def rows\n @rows.lazy\n end",
"def get_stars()\n sql = \"\n SELECT stars.* FROM stars\n INNER JOIN castings\n ON castings.star_id = stars.id\n WHERE movie_id = $1;\n \"\n values = [@id]\n pg_array = SqlRunner.run(sql, values)\n stars = pg_array.map { |star| Star.new(star) }\n return stars\nend",
"def db_fetch\n \"SELECT *\" + from_table_where + sql_match_conditions\n end",
"def fetch_rows(sql, &block)\n execute(sql) do |r|\n r.each(:symbolize_keys => true, &block)\n end\n self\n end",
"def fetch_array\n IBM_DB.fetch_array(@stmt) if @stmt\n end",
"def select_one(sql)\n result = execute(sql)\n result.fetch_hash\n end",
"def fetch_join_rows(rows)\n conn = @model.connection\n join_table = conn.quote_table_name @ref.join_table\n assoc_fkey = conn.quote_column_name @ref.association_foreign_key\n fkey = conn.quote_column_name @ref.foreign_key\n quoted_ids = rows.map { |row|\n begin\n id = row.send @ref.active_record_primary_key\n rescue NoMethodError => e\n raise MissingColumnError.new(row, e.name)\n end\n conn.quote id\n }\n\n quoted_ids.any? ? conn.\n exec_query(\"SELECT #{fkey}, #{assoc_fkey} FROM #{join_table} WHERE #{fkey} IN (#{quoted_ids.join ','})\").\n rows : []\n end",
"def query_with_cursor(sql_query, limit = nil)\n places = []\n transaction do\n ActiveRecord::Base.uncached do\n cursor = connection.select_value(sql_query)\n place = nil\n begin\n results = many.next_with_attributes(cursor)\n unless results.empty?\n place = with_details(results)\n places << place\n if (limit)\n limit -= 1\n place = nil if limit == 0\n end\n else\n place = nil\n end\n end while !place.nil?\n end\n end\n places\n end",
"def fetch_rows(sql, opts=OPTS, &block)\n db.execute(sql){|result| process_result_set(result, opts, &block)}\n self\n end",
"def fetch_rows(sql)\n return cursor_fetch_rows(sql){|h| yield h} if @opts[:cursor]\n execute(sql){|res| yield_hash_rows(res, fetch_rows_set_cols(res)){|h| yield h}}\n end",
"def fetch_all!\n fetch_all(true)\n end",
"def load!\n records_by_identity = index_by { |record| record.key_values }\n\n record_set.find_each_row do |row|\n identity = row.values_at(*record_set.key_column_names)\n records_by_identity[identity].hydrate(row)\n end\n\n loaded_count = count { |record| record.loaded? }\n if loaded_count < count\n raise Cequel::Record::RecordNotFound,\n \"Expected #{count} results; got #{loaded_count}\"\n end\n\n self\n end",
"def select_all(stmt, bindvars={}, &p)\n sanity_check(stmt)\n rows = nil\n execute(stmt, bindvars) do |sth|\n if block_given?\n sth.each(&p)\n else\n rows = sth.fetch_all\n end\n end\n return rows\n end",
"def player_ids(league_id)\n database do |db|\n # return id\n return db.execute('SELECT PlayerID FROM Player\n WHERE LeagueID = :league_id\n ORDER BY PlayerID',\n league_id).flatten\n end\nend",
"def fetch\n\n caching_enabled = is_caching_enabled()\n if caching_enabled\n result = fetch_from_cache\n end\n\n if !caching_enabled || (caching_enabled && result == nil)\n db_return = query()\n\n result = make_result_object(db_return.columns.as_json, db_return.as_json)\n if caching_enabled\n store_in_cache result\n end\n end\n result\n end",
"def pending_recieved_friend_requests\n friend_uids = []\n friend_uids = friend_uids + self.received_friend_requests.pluck(:sender_uid)\n return User.where(:uid => friend_uids)\nend",
"def fetch_rows(sql)\n execute(sql) do |res|\n columns = set_columns(res)\n yield_hash_rows(res, columns) {|h| yield h}\n end\n end",
"def better_seeds_query\n data = House.plants.includes(:seeds)\n all_seeds = []\n\n data.each do |plant|\n all_seeds.push(plant.seeds)\n end\n\n all_seeds\n\n end",
"def my_select(&prc)\n arr = []\n self.my_each { |el| arr << el if prc.call(el) }\n arr\n end",
"def to_a\n resolve\n @items\n end",
"def fetch_all\n fetch_many0(nil, Array)\n end",
"def retrieve_tasks_for_person(p_id)\n assignments = @db_base.query_assignments_for_person(p_id)\n results = Array.new()\n assignments.each { |result|\n results << result[\"T_Id\"]\n }\n return results\n end",
"def pending_objects\n # Object.where('some_status IS NOT NULL AND my_email_sent_at IS NULL')\n []\n end"
] | [
"0.72998756",
"0.68385845",
"0.64958185",
"0.6487218",
"0.62413484",
"0.596295",
"0.5930414",
"0.57285315",
"0.56897277",
"0.5619563",
"0.55649126",
"0.55202496",
"0.54578054",
"0.5420883",
"0.54006255",
"0.53791386",
"0.5354856",
"0.53156954",
"0.52780575",
"0.52760834",
"0.5243712",
"0.5209707",
"0.5157086",
"0.5157086",
"0.5143534",
"0.50982475",
"0.50394964",
"0.5001675",
"0.49730468",
"0.49189943",
"0.4910986",
"0.48651078",
"0.48329607",
"0.48285744",
"0.48279247",
"0.4817132",
"0.47929886",
"0.4785482",
"0.47757444",
"0.4775003",
"0.47699055",
"0.47654277",
"0.4765363",
"0.47638297",
"0.47609273",
"0.475195",
"0.47408432",
"0.47270522",
"0.47149205",
"0.47017378",
"0.46812934",
"0.46584484",
"0.46376795",
"0.46362188",
"0.4628969",
"0.46142143",
"0.4604408",
"0.4602253",
"0.45942882",
"0.45754588",
"0.45658886",
"0.45561922",
"0.45534468",
"0.45510915",
"0.4540611",
"0.45303872",
"0.4523302",
"0.45158383",
"0.451178",
"0.451178",
"0.45020214",
"0.44967782",
"0.44910884",
"0.44868654",
"0.44866994",
"0.44726494",
"0.44628167",
"0.44604123",
"0.44599563",
"0.44529158",
"0.4452512",
"0.44519398",
"0.44436044",
"0.44385308",
"0.44380975",
"0.4436514",
"0.44301212",
"0.44294247",
"0.44281778",
"0.44229248",
"0.44048443",
"0.43999225",
"0.4399905",
"0.43954018",
"0.43891487",
"0.4383421",
"0.43771744",
"0.43770733",
"0.43751338",
"0.43692246"
] | 0.76475996 | 0 |
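A short sketch of the asynchronous variant documented in the row above, again hedged: it assumes Rails 7.1 or later, the same hypothetical Person model, and an async query executor configured so the query can actually run in the background (without one the promise still works, it just resolves when read). Calling value on the returned promise is an assumption drawn from the ActiveRecord::Promise API, not from the dataset row itself.

promise = Person.where(age: 21).async_pluck(:name)  # => ActiveRecord::Promise
# ... other work can proceed while the query executes ...
names = promise.value  # blocks if still pending, then returns the plucked names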
Pick the value(s) from the named column(s) in the current relation. This is shorthand for relation.limit(1).pluck(column_names).first, and is primarily useful when you have a relation that's already narrowed down to a single row. Just like pluck, pick will only load the actual value, not the entire record object, so it's also more efficient. The value is, again like with pluck, typecast by the column type. Person.where(id: 1).pick(:name) SELECT people.name FROM people WHERE id = 1 LIMIT 1 => 'David' Person.where(id: 1).pick(:name, :email_address) SELECT people.name, people.email_address FROM people WHERE id = 1 LIMIT 1 | def pick(*column_names)
if loaded? && all_attributes?(column_names)
result = records.pick(*column_names)
return @async ? Promise::Complete.new(result) : result
end
limit(1).pluck(*column_names).then(&:first)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pluck(*columns)\n fail ArgumentError, 'No columns specified for Query#pluck' if columns.size.zero?\n\n query = return_query(columns)\n columns = query.response.columns\n\n if columns.size == 1\n column = columns[0]\n query.map { |row| row[column] }\n else\n query.map { |row| columns.map { |column| row[column] } }\n end\n end",
"def pluck(*column_names)\n return [] if @none\n\n if loaded? && all_attributes?(column_names)\n result = records.pluck(*column_names)\n if @async\n return Promise::Complete.new(result)\n else\n return result\n end\n end\n\n if has_include?(column_names.first)\n relation = apply_join_dependency\n relation.pluck(*column_names)\n else\n klass.disallow_raw_sql!(column_names.flatten)\n columns = arel_columns(column_names)\n relation = spawn\n relation.select_values = columns\n result = skip_query_cache_if_necessary do\n if where_clause.contradiction?\n ActiveRecord::Result.empty(async: @async)\n else\n klass.connection.select_all(relation.arel, \"#{klass.name} Pluck\", async: @async)\n end\n end\n result.then do |result|\n type_cast_pluck_values(result, columns)\n end\n end\n end",
"def pluck(column_name)\n merge_array([\"$pluck\", [column_name.to_s]])\n map { |r| r.send(column_name.to_sym) }\n end",
"def pluck_one(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row.first }\n result\n end",
"def async_pick(*column_names)\n async.pick(*column_names)\n end",
"def select_value(sql, name = nil)\n if result = select_one(sql, name)\n result.values.first\n end\n end",
"def pluck( field_name )\n\n end",
"def pluck(name)\n map { |t| t[name] }\n end",
"def fetch_first_field(sql)\n fetch_first_row(sql)&.values&.first\n end",
"def select_values(sql, name = nil)\n result = select_rows(sql, name)\n result.map { |v| v[0] }\n end",
"def select_col(col)\n select(col).all.map { |row| row[col]}\n end",
"def get(field)\n # Get the first/only row as a Hash.\n result = CONNECTION.execute(\"SELECT * FROM #{table_name} WHERE id = #{@id}\").first\n\n # Return only the value for the key of the field we're seeking.\n return result[field]\n end",
"def async_pluck(*column_names)\n async.pluck(*column_names)\n end",
"def pluck(*fields)\n fields = fields.flatten(1).reject(&:blank?).map(&:to_s)\n\n source_fields = fields - EVERFIELDS\n scope = except(FIELD_STORAGES, EXTRA_STORAGES)\n .source(source_fields.presence || false)\n\n hits = raw_limit_value ? scope.hits : scope.scroll_hits(batch_size: DEFAULT_PLUCK_BATCH_SIZE)\n hits.map do |hit|\n if fields.one?\n fetch_field(hit, fields.first)\n else\n fields.map do |field|\n fetch_field(hit, field)\n end\n end\n end\n end",
"def select_one(sql, name = nil)\n select(sql, name).first\n end",
"def fetch_first_column(sql)\n fetch_rows(sql).map(&:values).map(&:first)\n end",
"def select_first!\n limit(1).select!.first\n end",
"def fetch\n if raw\n values = Array.wrap(super)\n (field =~ /_[a-z]$/) ? values : values.first\n else\n super\n end\n end",
"def _pick_attribute(names)\n [names].flatten.each do |name|\n return self.send(name) if self.respond_to?(name)\n end\n return nil\n end",
"def pluck(field)\n\t\t\tif self.general_info.include? field\n\t\t\t\tself.result.map{ |p| p[field] }\n\t\t\telse\n\t\t\t\tself.result.map{ |p| p['participant_information'].select{ |a| a['desc'] == field } }.flatten.map{ |x| x['info'] }\n\t\t\tend\n\t\tend",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def select_one(sql, name = nil)\n result = select_all(sql, name)\n result.first if result\n end",
"def pluck_instances(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_instance(options).pluck(*cols)\n end",
"def get(field)\n result = CONNECTION.execute(\"SELECT * FROM '#{tablename}' WHERE id = ?;\", @id).first\n result[field]\n end",
"def select_one(sql, name = nil)\n result = select(sql, name)\n result.nil? ? nil : result.first\n end",
"def get_first_value( sql, *bind_vars )\n execute( sql, *bind_vars ) { |row| return row[0] }\n nil\n end",
"def fetch_value(sql)\n # Get the row\n row = fetch_row(sql)\n\n # Check field count\n if row.count > 1\n check.critical(\"Expected to receive a single value, but result has more than one field\", \"SQL: #{sql}\\nResult: #{row.inspect}\")\n end\n\n return row.values.first\n end",
"def raw cols\n ret = get_values [*cols]\n\n case cols\n when Array\n ret\n else\n ret.first\n end\n end",
"def pluck(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row }\n result\n end",
"def [](name)\n name = dasherize(name)\n name.downcase!\n selected = select_field_for(name)\n case\n when selected.length > 1\n selected.map { |f| f }\n when !Utilities.blank?(selected)\n selected.first\n else\n nil\n end\n end",
"def pluck_rows(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_row(options).pluck(*cols)\n end",
"def sql_select_one(sql)\n result = sql_select_first_row(sql)\n return nil unless result\n result.first[1] # Value des Key/Value-Tupels des ersten Elememtes im Hash\n end",
"def get_value(column)\n if value = setted_columns[column.name.to_sym]\n value\n end\n end",
"def raws cols\n ret = get_values [*cols], true\n\n case cols\n when Array\n ret\n else\n ret.first\n end\n end",
"def pluck(ary, key)\n found_value = Array.new\n ary[0].fetch(key)\n ary.each {|item| found_value << item.fetch(key)}\n found_value\nend",
"def [](name)\n return self.column(name)\n end",
"def unpack\n if self.values().length == 1\n row = self.values()[0]\n if row.length == 1\n return row[0]\n else\n warn 'more than 1 column in result.'\n return row\n end\n else\n warn 'more than 1 row returned.'\n return self.values()\n end\n end",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def select_one population=self.population\n select( 1, population ).first\n end",
"def select_one population=self.population\n select( 1, population ).first\n end",
"def select_one(sql)\n result = execute(sql)\n result.fetch_hash\n end",
"def [](x)\n case x\n when self\n x\n when Hash\n find_by_hash(:first, x)\n when nil, Symbol\n find(:first, :conditions => [ 'code = ?', (x || '_').to_s ])\n when String\n find(:first, :conditions => [ 'uuid = ? OR code = ?', (x || '').to_s, (x || '_').to_s ])\n when Integer\n find(:first, :conditions => [ 'id = ?', x ])\n end\n end",
"def pluck(&block)\n scope.pluck(*build_attributes(true, &block))\n end",
"def first\n\t\trow = connection.get_first_row <<-SQL\n\t\t\tSELECT #{columns.join \",\"} FROM #{table}\n\t\t\tORDER BY id ASC LIMIT 1;\n\t\tSQL\n\t\tinit_object_from_row(row)\n\tend",
"def on(field)\n self[field].to_a.first\n end",
"def on(field)\n self[field].to_a.first\n end",
"def first(field)\n @attributes[field]\n end",
"def pluck(*keys)\n keys = keys.flatten\n map {|h| h.pluck(keys)}\n # if keys.count > 1\n # map {|h| keys.map {|k| h[k]}}\n # else\n # map {|h| h[keys.first]}\n # end\n end",
"def chooseColumn\n @metadata.chooseColumn\n end",
"def pluck(ary, key)\n # your implementation here\n ary.map {|item| item[key]}\nend",
"def select_one(stmt, bindvars={})\n sanity_check(stmt)\n row = nil\n execute(stmt, bindvars) do |sth|\n row = sth.fetch\n end\n row\n end",
"def select(field_or_fields)\n scoped :select => field_or_fields\n end",
"def collection\n\n\n\n if !options[:collection] && column\n\n\n\n pluck_column\n\n\n\n else\n\n\n\n super\n\n\n\n end\n\n\n\n end",
"def pluck_unique(column_name, results = last_results)\n results.map {|r| r[column_name]}.uniq\nend",
"def fetch_property(name)\n properties.where(\"name = ?\", name).first\n end",
"def get_first_row(*args)\n @db.get_first_row(*args)\n end",
"def sql_select_first_row(sql)\n result = sql_select_all(sql)\n return nil if result.empty?\n result[0].extend SelectHashHelper # Erweitern Hash um Methodenzugriff auf Elemente\n end",
"def select(*col_names)\n r = @raw\n col_names.collect do |n|\n p = @vprocs[n]\n p ? p.call(r) : nil\n end\n end",
"def select_one(sql, name = nil) end",
"def pick(name = default_fauxture_name)\n Sweatshop.pick(self, name)\n end",
"def [](field_name)\n f = field(field_name)\n f && f.value\n end",
"def first(n=1)\n return values[0] if self.class == BaseRelation && loaded && n == 1\n result = limit(n).load\n result.length == 1 ? result[0] : result\n end",
"def select\n (@select ||= (defaults[:select] || [])).collect { |c| c == '*' ? all_columns.keys : c }.flatten\n end",
"def get_single_column sql_text\n @client.query(sql_text, :as => :array).map{ |row| row[0] }\n end",
"def get(id, column_family_name, column_name)\n get_cell(id, column_family_name, column_name).try :value\n end",
"def column_value\n row.fetch(column){row[@column.to_s]}\n end",
"def pluck(arr, prop)\n result = []\n arr.each do |n|\n result << n[prop]\n end\n result\nend",
"def select(*columns)\n clone(:select => columns)\n end",
"def select(*args)\n Criteria.new(self).select(*args)\n end",
"def find_by(values)\n all.where(values).limit(1).query_as(:n).pluck(:n).first\n end",
"def column\n Column.where(id: object.column_id)[0]\n end",
"def get\n sel = self.selected\n if sel.length == 1\n sel.first\n else\n sel\n end\n end",
"def get_value(name)\n name = name.to_s.upcase.to_sym\n return @values_changed[name] if @values_changed.has_key?(name)\n return nil if new_record?\n column = @table.column(name)\n raise UnknownColumnError.new(name) unless column\n @values_cached[name] ||= column.unpack(@data[column.offset, column.size])\n end",
"def by_field(field_name)\n begin\n @arr[@column_map[field_name.to_s]]\n rescue TypeError\n nil\n end\n end",
"def [](name)\n name = name.to_sym\n @values[name] && @values[name].first\n end",
"def select(*columns)\n @options[:select] = \"SPECIFIC_ATTRIBUTES\"\n @options[:attributes_to_get] = columns.map(&:to_s)\n self\n end",
"def single_value(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.first_value_from(sql)}}\n end",
"def pick(number, *cards)\n cards.flatten.first(number)\n end",
"def[](field)\n return self.values[field.to_s]\n end",
"def pluck selector = {}, &block\n quantity = (selector.delete :quantity).to_i\n if blocks?\n unless (result = find_by selector, &block).empty?\n result = result[0..(quantity - 1)] if quantity > 0\n result.each {|b| b.set_attr 'skip-option', '' }\n end\n else\n result = []\n end\n quantity == 1 ? result[0] : result\n end",
"def select(fields)\n return SkyDB::Query.new(:client => self).select(fields)\n end",
"def find_by(**args)\n where(**args).first\n end",
"def first_row_from(table, options = {})\n\t\tselect_from(table, { limit: 1 }.merge(options)).first\n\tend",
"def _select_map_single\n rows = []\n clone(:_sequel_pg_type=>:first).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def get_from_db(column, table, where, value)\n if where.nil? || value.nil?\n return db.execute(\"SELECT #{column} FROM #{table}\")\n else\n return db.execute(\"SELECT #{column} FROM #{table} WHERE #{where} = ?\",value)\n end\n end",
"def first\n\n wi(fetch_all({}).first)\n end",
"def [](column)\n @values[column]\n end",
"def column attribute_name, opts = {}\n klass = opts[:class] || @default_opts[:class] || nil\n raise ArgumentError, 'You must specify a :class option, either explicitly, or using with_opts' if klass.nil?\n\n source_attribute = (opts[:from] || attribute_name).to_s\n\n define_method attribute_name do\n serialized_attrib_names = klass.columns.select {|c| c.cast_type.is_a?(ActiveRecord::Type::Serialized) }.map {|c| c.name.to_s }\n if serialized_attrib_names.include?(source_attribute.to_s)\n return YAML.load(@raw_attributes[attribute_name.to_s])\n end\n\n val = klass.columns_hash[source_attribute].type_cast_from_database(@raw_attributes[attribute_name.to_s])\n\n if val.is_a?(Time) && Time.respond_to?(:zone) && Time.zone.respond_to?(:utc_offset)\n # Adjust UTC times to rails timezone\n val.localtime(Time.zone.utc_offset)\n end\n\n return val\n end\n\n # bit mucky, a lot here that feels like it should be a little method of its own\n select_column = \"#{klass.table_name}.#{source_attribute}\"\n select_column += \" as #{attribute_name}\" if opts[:from]\n (@sql_select_columns ||= []) << select_column\n end",
"def column_for(col_name)\n relation.column_names.detect { |col| col == col_name }\n end",
"def pluck(coll, attr_name)\n coll.map{|ct| ct[attr_name]}.uniq.compact\n end",
"def first(options={})\r\n find(:first, options)\r\n end",
"def single(sql, values = [])\n r = $db.exec_params(sql, values)\n return nil if r.ntuples == 0\n convert_to_ruby_types(r.first)\nend",
"def first\n @values.first\n end",
"def select\n self[:select] ||= associated_class.table_name.*\n end",
"def first\n limit(1).to_a.first\n end",
"def select\n return self[:select] if include?(:select)\n self[:select] ||= associated_class.table_name.*\n end",
"def [](column)\n @values[column]\n end",
"def [](column)\n @values[column]\n end",
"def first\n self.take(1)[0]\n end",
"def id field, value\n\t\t# can't bind field name, oh well\n\t\t@db.get_first_value \"select id from #{@name} where #{field} = ?\", value\n\tend"
] | [
"0.68192977",
"0.6554405",
"0.6462056",
"0.6354714",
"0.62995535",
"0.62829036",
"0.62457407",
"0.5963593",
"0.58643395",
"0.56886715",
"0.5661968",
"0.5649603",
"0.5627481",
"0.5615555",
"0.5595623",
"0.5537795",
"0.54360074",
"0.5402875",
"0.53860325",
"0.5380074",
"0.5375733",
"0.5351063",
"0.53435117",
"0.53328216",
"0.5328181",
"0.5320466",
"0.53173995",
"0.5293118",
"0.5273873",
"0.5262553",
"0.52624756",
"0.52437794",
"0.5238224",
"0.5227238",
"0.5223018",
"0.5166597",
"0.51357245",
"0.5134648",
"0.50701624",
"0.50701624",
"0.50635797",
"0.5056513",
"0.50518876",
"0.50496584",
"0.5037035",
"0.5037035",
"0.5036035",
"0.5031668",
"0.4983175",
"0.4946119",
"0.4941438",
"0.4937283",
"0.4924788",
"0.48893598",
"0.48882177",
"0.48607725",
"0.48475426",
"0.48390314",
"0.48111674",
"0.4786702",
"0.47774637",
"0.47740236",
"0.47670332",
"0.47490728",
"0.47468534",
"0.4745675",
"0.47385058",
"0.47384027",
"0.47286993",
"0.4727639",
"0.4725517",
"0.47030392",
"0.46976835",
"0.46951705",
"0.46744663",
"0.46737182",
"0.46721473",
"0.4649922",
"0.46478963",
"0.4629059",
"0.4628622",
"0.46257904",
"0.46030924",
"0.45995942",
"0.45964694",
"0.45776916",
"0.4560728",
"0.45543522",
"0.45496312",
"0.45493284",
"0.45356083",
"0.45342374",
"0.45342264",
"0.45328528",
"0.45295522",
"0.45275825",
"0.4520516",
"0.4520516",
"0.45203716",
"0.4519149"
] | 0.77229655 | 0 |
Same as pick but perform the query asynchronously and returns an ActiveRecord::Promise | def async_pick(*column_names)
async.pick(*column_names)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pick(*column_names)\n if loaded? && all_attributes?(column_names)\n result = records.pick(*column_names)\n return @async ? Promise::Complete.new(result) : result\n end\n\n limit(1).pluck(*column_names).then(&:first)\n end",
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def async_lookup(args)\n unless (Hash === args)\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all do |rows|\n yield(rows.any? ? rows.first : nil)\n end\n\n return\n end",
"def async_pluck(*column_names)\n async.pluck(*column_names)\n end",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def sync\n raise \".sync can only be used on the client\" if Volt.client?\n\n result = nil\n error = nil\n\n self.then do |val|\n result = val\n end.fail do |err|\n error = err\n end\n\n if error\n err_str = \"Exception in Promise at .sync: #{error.inspect}\"\n err_str += error.backtrace.join(\"\\n\")\n Volt.logger.error(err_str)\n fail error\n else\n return result\n end\n end",
"def select\n execute_only(:select)\n end",
"def demand(promise)\n if promise.respond_to? :__result__\n promise.__result__\n else\n promise\n end\n end",
"def pluck(*column_names)\n return [] if @none\n\n if loaded? && all_attributes?(column_names)\n result = records.pluck(*column_names)\n if @async\n return Promise::Complete.new(result)\n else\n return result\n end\n end\n\n if has_include?(column_names.first)\n relation = apply_join_dependency\n relation.pluck(*column_names)\n else\n klass.disallow_raw_sql!(column_names.flatten)\n columns = arel_columns(column_names)\n relation = spawn\n relation.select_values = columns\n result = skip_query_cache_if_necessary do\n if where_clause.contradiction?\n ActiveRecord::Result.empty(async: @async)\n else\n klass.connection.select_all(relation.arel, \"#{klass.name} Pluck\", async: @async)\n end\n end\n result.then do |result|\n type_cast_pluck_values(result, columns)\n end\n end\n end",
"def query_result_set(query)\n\t result = ValueSet.new\n\t call(:query_result_set, query) do |marshalled_set|\n\t\tfor task in marshalled_set\n\t\t task = local_object(task)\n\t\t Distributed.keep.ref(task)\n\t\t result << task\n\t\tend\n\t end\n\n\t result\n\tend",
"def prefetch_rows\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def promise_find(id)\n sid = id.to_s\n record_in_progress = if _record_cache.has_key?(sid)\n _record_cache[sid]\n else\n self.new(id: id)\n end\n _promise_find(id, record_in_progress)\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def query_wait sql, waiting_time = 10\n result = db[sql].all\n if result.empty?\n if waiting_time != 0\n sleep 1\n result = query_wait(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def fetch\n\n caching_enabled = is_caching_enabled()\n if caching_enabled\n result = fetch_from_cache\n end\n\n if !caching_enabled || (caching_enabled && result == nil)\n db_return = query()\n\n result = make_result_object(db_return.columns.as_json, db_return.as_json)\n if caching_enabled\n store_in_cache result\n end\n end\n result\n end",
"def select_one(sql, *bindvars)\n ret = nil\n thread = Thread.new { ret = super }\n thread.join\n ret\n rescue Interrupt\n self.break\n raise\n end",
"def query(sql, *bind_values)\n @pool.acquire { |conn| conn.query(sql, *bind_values).first }\n end",
"def select(*args)\n get_repository_object.select(*args)\n end",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def fetch_and_enqueue(force_all_syncs=false)\n begin\n if force_all_syncs\n query = db.fetch(%Q(\n SELECT r.name, r.id FROM #{relation} r, users u WHERE\n (r.state = '#{CartoDB::Synchronization::Member::STATE_SUCCESS}'\n OR r.state = '#{CartoDB::Synchronization::Member::STATE_SYNCING}')\n AND u.id = user_id AND u.state = '#{Carto::User::STATE_ACTIVE}'\n ))\n else\n query = db.fetch(%Q(\n SELECT r.name, r.id, r.user_id FROM #{relation} r, users u\n WHERE EXTRACT(EPOCH FROM r.run_at) < #{Time.now.utc.to_f}\n AND u.id = user_id AND u.state = '#{Carto::User::STATE_ACTIVE}'\n AND\n (\n r.state = '#{CartoDB::Synchronization::Member::STATE_SUCCESS}'\n OR (r.state = '#{CartoDB::Synchronization::Member::STATE_FAILURE}'\n AND r.retried_times < #{CartoDB::Synchronization::Member::MAX_RETRIES})\n )\n ORDER BY ran_at\n ))\n end\n success = true\n rescue Exception => e\n success = false\n print_log(\"ERROR fetching sync tables: #{e.message}, #{e.backtrace}\", true)\n end\n\n if success\n print_log \"Fetched #{query.count} records\"\n force_all_syncs ? enqueue_all(query) : enqueue_rate_limited(query)\n end\n\n self\n end",
"def async_result()\n #This is a stub, used for indexing\n end",
"def single_record!\n if use_eager_all?\n obj = clone(:all_called=>true).all.first\n\n if opts[:eager_graph]\n obj = clone(:all_called=>true).where(obj.qualified_pk_hash).unlimited.all.first\n end\n\n obj\n else\n super\n end\n end",
"def fetch(sql, *params)\n rs = self.execute(sql, *params)\n self.execute(\"flush privileges\") # Always flush in case user wants to alter users\n return [] if self.interpreter.preview? && ! rs\n return rs.fetch_all rescue nil\n end",
"def execute\n klass.collection.find(selector, process_options) || []\n end",
"def parallel_queries\n raise \"Model is missing id column\" if ar_class.columns.none? { |column| column.name == \"id\" }\n\n if ar_class.respond_to?(:scrubbable_scope)\n num_records = ar_class.send(:scrubbable_scope).count\n else\n num_records = ar_class.count\n end\n return [] if num_records == 0 # no records to import\n\n record_window_size, modulus = num_records.divmod(num_of_batches)\n if record_window_size < 1\n record_window_size = 1\n modulus = 0\n end\n\n start_id = next_id(ar_class: ar_class, offset: 0)\n queries = num_of_batches.times.each_with_object([]) do |_, queries|\n next unless start_id\n\n end_id = next_id(ar_class: ar_class, id: start_id, offset: record_window_size - 1)\n if modulus > 0\n end_id = next_id(ar_class: ar_class, id: end_id)\n modulus -= 1\n end\n queries << { id: start_id..end_id } if end_id\n start_id = next_id(ar_class: ar_class, id: end_id || start_id)\n end\n\n # just in case new records are added since we started, extend the end ID\n queries[-1] = [\"#{ar_class.quoted_table_name}.id >= ?\", queries[-1][:id].begin] if queries.any?\n\n queries\n end",
"def fetch!(&block)\n object_versions do |object_version|\n fetch_object(object_version, &block)\n end\n\n @pool.wait(:done)\n end",
"def select(sql)\n raise(ArgumentError, \"Bad sql parameter\") unless sql.kind_of?(String)\n\n client = ensure_connected\n\n Pod4.logger.debug(__FILE__){ \"select: #{sql}\" }\n query = client.execute(sql)\n\n rows = []\n query.each do |r| \n\n if block_given? \n rows << yield(r)\n else\n rows << r\n end\n\n end\n\n query.cancel \n rows\n\n rescue => e\n handle_error(e)\n end",
"def take!\n take || raise_record_not_found_exception!\n end",
"def fetch\n @result = Result.new(data, :query => self)\n end",
"def select(*) end",
"def select(*args)\n call_query_method(:select, *args)\n end",
"def take!\n take or raise RecordNotFound\n end",
"def await(p)\n @eff.await.perform p\n end",
"def query(sql)\n if NB.neverblocking? && NB.reactor.running?\n send_query sql\n NB.wait(:read, IO.new(socket))\n get_result\n else\n super(sql)\n end\n end",
"def fetch(force=false)\n if @fetch.nil? || force\n query = @query.dup\n query[:q] = query[:q].join(\" \")\n perform_get(query)\n end\n\n @fetch\n end",
"def get_data\n\t\texecute unless @result\n\t\treturn get_data_from_result(@result)\n\tend",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def fetch\n @raw_result = opts_for_cache_proxy[:raw] == true\n\n result = if refresh_cache?\n execute_find(@raw_result)\n elsif cached.is_a?(AridCache::CacheProxy::Result)\n if cached.has_ids? && @raw_result\n self.cached # return it unmodified\n elsif cached.has_ids?\n fetch_from_cache # return a list of active records after applying options\n else # true if we have only calculated the count thus far\n execute_find(@raw_result)\n end\n else\n cached # some base type, return it unmodified\n end\n end",
"def query(sql, name = nil) #:nodoc:\n #log(sql, name) do\n #TODO: @async\n select_rows sql, name\n end",
"def _select_map_single\n rows = []\n clone(:_sequel_pg_type=>:first).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def each\n @pool.with do |conn|\n conn.send_query @sql\n conn.set_single_row_mode\n loop do\n res = conn.get_result\n break unless res\n res.check\n res.stream_each { |row| yield row }\n end\n end\n end",
"def synchronize_resultset; end",
"def load!\n records_by_identity = index_by { |record| record.key_values }\n\n record_set.find_each_row do |row|\n identity = row.values_at(*record_set.key_column_names)\n records_by_identity[identity].hydrate(row)\n end\n\n loaded_count = count { |record| record.loaded? }\n if loaded_count < count\n raise Cequel::Record::RecordNotFound,\n \"Expected #{count} results; got #{loaded_count}\"\n end\n\n self\n end",
"def ids\n primary_key_array = Array(primary_key)\n\n if loaded?\n result = records.map do |record|\n if primary_key_array.one?\n record._read_attribute(primary_key_array.first)\n else\n primary_key_array.map { |column| record._read_attribute(column) }\n end\n end\n return @async ? Promise::Complete.new(result) : result\n end\n\n if has_include?(primary_key)\n relation = apply_join_dependency.group(*primary_key_array)\n return relation.ids\n end\n\n columns = arel_columns(primary_key_array)\n relation = spawn\n relation.select_values = columns\n\n result = if relation.where_clause.contradiction?\n ActiveRecord::Result.empty\n else\n skip_query_cache_if_necessary do\n klass.connection.select_all(relation, \"#{klass.name} Ids\", async: @async)\n end\n end\n\n result.then { |result| type_cast_pluck_values(result, columns) }\n end",
"def run\n Concurrent::Promise.zip(*@promises).value!\n end",
"def select(&block); end",
"def query_empty sql, waiting_time = 10\n result = db[sql].all\n if !result.empty?\n if waiting_time != 0\n sleep 1\n result = query(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def select_one(sql)\n result = execute(sql)\n result.fetch_hash\n end",
"def execute(async)\n f = Fiber.current\n begin\n conn = acquire(f)\n conn.acquired_for_connection_pool += 1\n yield conn\n ensure\n conn.acquired_for_connection_pool -= 1\n conn.run_postponed_queries if conn.acquired_for_connection_pool == 0\n release(f) if !async && conn.acquired_for_connection_pool == 0\n end\n end",
"def fetch_query args={}\n query(args.clone)\nend",
"def fetch_query args={}\n query(args.clone)\nend",
"def load( callback )\n Fiber.new do\n begin\n @pgconn.query('select * from cars') do |result|\n result.check\n @cars = []\n result.each_row do |row|\n @cars << row\n end\n @status = :loaded\n end\n rescue\n p $!\n @status = :not_loaded\n end\n callback.call\n end.resume\n end",
"def _fetch(*)\n fail NotImplementedError\n end",
"def run(&block)\n @repository.query(self, &block)\n end",
"def query_single(sql, *params)\n results = run(sql, *params)\n results.each(as: :array, :first => true).first\n end",
"def select_and_lock(relation, limit:)\n relation = upcoming(relation)\n\n # FOR UPDATE SKIP LOCKED selects and locks entries, but skips those that\n # are already locked - preventing this transaction from being locked.\n sql = relation.to_sql + \" FOR UPDATE SKIP LOCKED\"\n sql += \" LIMIT #{limit}\" if limit\n\n item_class.find_by_sql(sql)\n end",
"def fetch(*)\n raise NotImplementedError, 'This should be defined in a subclass'\n end",
"def fetch_row(sql)\n # Run the query\n results = query(sql)\n\n # Check result counts\n if results.count == 0\n check.critical(\"Expected to receive a single row, but result set is empty\", \"SQL: #{sql}\")\n end\n if results.count > 1\n check.critical(\"Expected to receive a single row, but result has #{results.count} lines\", \"SQL: #{sql}\")\n end\n\n # Get the first and only row\n return results.first\n end",
"def recipes\n db_connection do |conn|\n conn.exec(\"SELECT * FROM recipes\").to_a\n end\nend",
"def fetch_records(ids)\n model.where(id: ids)\n end",
"def select!(&block); end",
"def first **args\n query( **( { order: \"@rid\" , limit: 1 }.merge args)).execute(reduce: true)\n\tend",
"def pick element\n element.perform :pick\n end",
"def first(n=1)\n return values[0] if self.class == BaseRelation && loaded && n == 1\n result = limit(n).load\n result.length == 1 ? result[0] : result\n end",
"def fetch *args, &proc\n invoke(*args, &proc).last\n end",
"def fetch!\n info \"FETCH\"\n Fetcher.run @dbi, @collections, @settings, @logger do |*args| update_db(*args) end # results will be passed to the update function\n end",
"def take_snapshot\n select\n end",
"def lazy_select\n lazify.call(S.select)\n end",
"def poll_result(id)\n query = get_query(id)\n get_query_result(query)\n end",
"def call(*args)\n tuples = execute(*args)\n\n if result == :one\n tuples.first\n else\n tuples\n end\n end",
"def call(*args)\n tuples = execute(*args)\n\n if result == :one\n tuples.first\n else\n tuples\n end\n end",
"def run\n if @prepared_type == :insert\n fetch_rows(prepared_sql){|r| return r.values.first}\n else\n super\n end\n end",
"def retrieve_tasks\n Query.get_data(@values[:id], @values[:start_time], @values[:end_time])\n end",
"def retrieve_tasks\n Query.get_data(@values[:id], @values[:start_time], @values[:end_time])\n end",
"def retrieve_tasks\n Query.get_data(@values[:id], @values[:start_time], @values[:end_time])\n end",
"def getDBValue(connection, query, id1, *id2)\r\n dbi_query = connection.prepare(query)\r\n dbi_query.execute(id1, *id2)\r\n #fetch the result\r\n return dbi_query.fetch\r\nend",
"def fetch_rows(sql, opts=OPTS, &block)\n db.execute(sql){|result| process_result_set(result, opts, &block)}\n self\n end",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def query(&block)\n items = assert_connected(table).query(&block)\n results = []\n items.each { |i| results << new(i) }\n results\n end",
"def promise_all\n _class_fetch_states[:all] = 'i'\n _promise_get(\"#{resource_base_uri}.json?timestamp=#{`Date.now() + Math.random()`}\").then do |response|\n collection = _convert_array_to_collection(response.json[self.to_s.underscore.pluralize])\n _class_fetch_states[:all] = 'f'\n _notify_class_observers\n warn_message = \"#{self.to_s}.all has been called. This may potentially load a lot of data and cause memory and performance problems.\"\n `console.warn(warn_message)`\n collection\n end.fail do |response|\n error_message = \"#{self.to_s}.all failed to fetch records!\"\n `console.error(error_message)`\n response\n end\n end",
"def query(&blk)\n @adapter.query(collection, self, &blk)\n end",
"def select(range, options = {})\n start, finish = range_pair(range)\n fetch_range :zrangebyscore, start, finish, options\n end",
"def fetch(direction = 'NEXT')\n @select.fetch_by_cursor(@name, direction, @connection)\n end",
"def execute_async(input_set = nil)\n if input_set == nil\n input_set = InputSet.new()\n end\n param_map = {\"source_id\" => TembooSession.get_identifier(),\n \"mode\" => \"async\",\n \"store_results\" => \"true\"}\n body = input_set.format_inputs()\n results = @session.post_request(get_path(), body, param_map)\n id = JSON.load(results.body())[\"id\"]\n execution = ChoreographyExecution.new(@session, id)\n return execution\n end",
"def fetch\n @fetched_record = nil\n return nil if @index >= @records.size\n rec = @records[@index]\n @index += 1\n @fetched_record = rec\n return rec\n end",
"def fetch_result(&block)\n if block_given?\n return @cue_list.select(&block)\n else\n return @cue_list\n end\n end",
"def query(&block)\n @delegate.query(block)\n end",
"def execute\n # First, execute the SQL, applying the valid after_filters\n ret = apply_after_filters(execute_sql)\n\n # Set changed property to true\n changed\n\n # Notify all observers of the ids of the current result\n # set\n notify_observers(\n ret.collect{|instance| instance.send(model.primary_key)},\n self\n )\n\n # Reset the Query\n reset!\n\n # Return the results\n ret\n end",
"def fetch(ids)\n data = nil\n\n model.synchronize do\n ids.each do |id|\n redis.queue(\"HGETALL\", namespace[id])\n end\n\n data = redis.commit\n end\n\n return [] if data.nil?\n\n [].tap do |result|\n data.each_with_index do |atts, idx|\n result << model.new(Utils.dict(atts).update(:id => ids[idx]))\n end\n end\n end",
"def fetch(id)\n search(id: id)[:records].first\n end",
"def fetch; end",
"def fetch; end",
"def select_first!\n limit(1).select!.first\n end",
"def find_by_sql(sql)\n connection.select_all(sql, \"#{name} Load\").inject([]) { |objects, record| objects << instantiate(record) }\n end",
"def fetch_multi(*ids)\n ensure_base_model\n raise_if_scoped\n raise NotImplementedError, \"fetching needs the primary index enabled\" unless primary_cache_index_enabled\n options = ids.extract_options!\n ids.flatten!(1)\n records = if IdentityCache.should_use_cache?\n require_if_necessary do\n cache_keys = ids.map {|id| rails_cache_key(id) }\n key_to_id_map = Hash[ cache_keys.zip(ids) ]\n key_to_record_map = {}\n\n coders_by_key = IdentityCache.fetch_multi(cache_keys) do |unresolved_keys|\n ids = unresolved_keys.map {|key| key_to_id_map[key] }\n records = find_batch(ids)\n key_to_record_map = records.compact.index_by{ |record| rails_cache_key(record.id) }\n records.map {|record| coder_from_record(record) }\n end\n\n cache_keys.map{ |key| key_to_record_map[key] || record_from_coder(coders_by_key[key]) }\n end\n else\n find_batch(ids)\n end\n records.compact!\n prefetch_associations(options[:includes], records) if options[:includes]\n records\n end",
"def select(db); end",
"def select(db); end",
"def fetch\n raise NotImplementedError\n end",
"def query_each(result_set) # :nodoc:\n\t result_set.each do |task|\n\t\tyield(task)\n\t end\n\n\tensure\n\t Roby.synchronize do\n\t\tif result_set\n\t\t result_set.each do |task|\n\t\t\tDistributed.keep.deref(task)\n\t\t end\n\t\tend\n\t end\n\tend"
] | [
"0.6451383",
"0.6228038",
"0.62044704",
"0.5987701",
"0.5839192",
"0.534773",
"0.5245327",
"0.5233414",
"0.5173465",
"0.51208097",
"0.5100011",
"0.5082961",
"0.50784767",
"0.50784767",
"0.5074259",
"0.5069922",
"0.50564957",
"0.5028578",
"0.50239676",
"0.50180995",
"0.50147325",
"0.5003771",
"0.50019425",
"0.49883363",
"0.49875212",
"0.49762046",
"0.49370724",
"0.4935221",
"0.49232256",
"0.48877922",
"0.48813328",
"0.48709393",
"0.4849172",
"0.48363802",
"0.4821191",
"0.48194113",
"0.48184705",
"0.4815844",
"0.48058826",
"0.4800559",
"0.4795495",
"0.4793415",
"0.4792432",
"0.47913",
"0.4787999",
"0.47732902",
"0.47704074",
"0.4769664",
"0.47609845",
"0.47552425",
"0.47439927",
"0.47439927",
"0.47424108",
"0.47388858",
"0.47386023",
"0.47162482",
"0.47145686",
"0.47134483",
"0.4708731",
"0.4704181",
"0.46968737",
"0.46910763",
"0.46740058",
"0.4657552",
"0.46558997",
"0.4652333",
"0.46444005",
"0.4644029",
"0.46367654",
"0.46354344",
"0.46342412",
"0.46342412",
"0.4632016",
"0.46317077",
"0.46317077",
"0.46317077",
"0.46207532",
"0.46200657",
"0.46099868",
"0.46095493",
"0.4607944",
"0.46051717",
"0.4604477",
"0.45984513",
"0.45947593",
"0.45898485",
"0.45880762",
"0.45870575",
"0.45766047",
"0.45701247",
"0.4570018",
"0.4569961",
"0.4569961",
"0.4568939",
"0.4565128",
"0.45639008",
"0.45635134",
"0.45635134",
"0.4554544",
"0.45525908"
] | 0.6798223 | 0 |
Returns the base model's ID's for the relation using the table's primary key Person.ids SELECT people.id FROM people Person.joins(:companies).ids SELECT people.id FROM people INNER JOIN companies ON companies.id = people.company_id | def ids
primary_key_array = Array(primary_key)
if loaded?
result = records.map do |record|
if primary_key_array.one?
record._read_attribute(primary_key_array.first)
else
primary_key_array.map { |column| record._read_attribute(column) }
end
end
return @async ? Promise::Complete.new(result) : result
end
if has_include?(primary_key)
relation = apply_join_dependency.group(*primary_key_array)
return relation.ids
end
columns = arel_columns(primary_key_array)
relation = spawn
relation.select_values = columns
result = if relation.where_clause.contradiction?
ActiveRecord::Result.empty
else
skip_query_cache_if_necessary do
klass.connection.select_all(relation, "#{klass.name} Ids", async: @async)
end
end
result.then { |result| type_cast_pluck_values(result, columns) }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def company_ids\n @cached_company_ids ||= @object.companies.legit.pluck(:id)\n end",
"def get_relation_ids(project_id = nil, base_ids = nil)\n\t\treturn [] if base_ids == []\n\t\tsubcatrels.in_project(project_id).among_denotations(base_ids).pluck(:id)\n\tend",
"def all_ids\n db.transaction(true) do |db|\n extract_model_ids(db)\n end\n end",
"def ids_reader\n if loaded?\n target.pluck(reflection.association_primary_key)\n elsif !target.empty?\n load_target.pluck(reflection.association_primary_key)\n else\n @association_ids ||= scope.pluck(reflection.association_primary_key)\n end\n end",
"def eager_loaded_ids(docs, metadata)\n if metadata.stores_foreign_key?\n docs.flat_map{ |doc| doc.send(metadata.foreign_key) }\n else\n docs.map(&:id)\n end\n end",
"def ancestor_ids\n read_attribute(self.base_class.structure_column).to_s.split(%r|[,/]|).uniq.map { |id| cast_primary_key(id) }\n end",
"def ids\n pluck(:id)\n end",
"def project_ids_relation\n limit_projects.select(:id).reorder(nil)\n end",
"def advisor_companies\n Company.where(:id.in => advisor_company_ids)\n end",
"def get_relation_hids(project_id = nil, base_ids = nil)\n\t\treturn [] if base_ids == []\n\t\tsubcatrels.in_project(project_id).among_denotations(base_ids).pluck(:hid)\n\tend",
"def get_ids (model_objects)\n ids = []\n if !model_objects.nil?\n \n for model_object in model_objects\n ids.push(model_object.id)\n end\n end\n return ids\n\n end",
"def ids_reader\n predicate = reflection.options.fetch(:has_member_relation)\n if loaded?\n target.map(&:id)\n else\n owner.resource.query({ predicate: predicate })\n .map { |s| ActiveFedora::Base.uri_to_id(s.object) } | target.map(&:id)\n end\n end",
"def companies\n company_ids = company_affiliations.with_access.map(&:company_id)\n Company.where(:id.in => company_ids)\n end",
"def results\n Opinion.where(id: super.pluck(:id))\n end",
"def find_all_ids(options={})\n find(:all, options.merge({:select => 'id'})).map(&:id) \n end",
"def get_ids_for_query\n if param.field.options[:definition]\n definition = param.field.options[:definition]\n else\n # Set up a definition\n definition = Definition.new\n definition.base = param.field.options[:base].is_a?(Proc) ? param.field.options[:base].call : param.field.options[:base]\n\n # Get the fields which we should search for\n fields = @field.is_a?(Array) ? @field : [@field]\n fields.each do |field|\n definition.fields << DefinitionField.new(field, :condition => Local, :value_transmogrification => param.field.options[:value_transmogrification])\n end\n end\n\n # Set up a query\n query = Query.new(definition)\n\n # Add all the fields\n query.group(:any) do |params|\n fields.each do |field|\n params << query.param(field, @operator, @value)\n end\n end\n\n ids = query.results.pluck(param.field.options[:foreign_key])\n\n if @operator == :blank\n all_ids = param.query.definition.base.pluck(:id)\n present_ids = definition.base.pluck(param.field.options[:foreign_key])\n ids = (all_ids - present_ids) + ids\n end\n\n ids\n\n end",
"def person_ids\n persons = Person.find_all_from_identifier(source: 'xkonto', identifier: username)\n return nil if persons.blank?\n return persons.map(&:id)\n end",
"def find_all(klass)\n ds = @model_class.where(:class_type=>klass.to_s)\n ds.all.map {|r| r[:id] }\n end",
"def omim_ids\n @table.keys\n end",
"def advised_companies\n Company.where(:id.in => advised_company_ids)\n end",
"def advised_companies\n Company.where(:id.in => advised_company_ids)\n end",
"def id\n if primary_key.is_a?(Array)\n id = Array.new\n primary_key.each do |key|\n sub_id = self.send(key)\n return nil if (id == \"\")\n id << sub_id\n end\n else\n id = self.send(self.class.primary_key)\n return nil if (id == \"\")\n end\n return id\n end",
"def ids_permitting( priv, keyword_args = {} )\n priv, association = disassemble_priv( priv )\n if association.nil?\n return self.ids_permitting_internal( priv, keyword_args.merge( :distinct => true ))\n else\n klass = self.class_for_associate(association)\n fk = self.reflect_on_association(association).foreign_key.to_s\n return <<-END_SQL\n (select id from #{table_name} where #{table_name}.#{fk} in\n #{klass.ids_permitting_internal( priv, keyword_args )})\n END_SQL\n end\n end",
"def get_ids_of_all_jobs\r\n result = [id, parent_job_id].compact\r\n result << Job.where([\"parent_job_id in (?)\",result]).select(:id).collect(&:id)\r\n result.flatten.uniq\r\n end",
"def acc_ids\n\t self.accounts.pluck(:id)\n\tend",
"def get_ids_from(table_name)\n DB::Queries.get_ids_from(table: table_name)\nend",
"def resolve_ids_sql\n\t\t(TopElements-[\"custom_note\", \"tabulation_area\"]).each do |element|\n\t\t\tparamlist = UnresolvedId.count(:parameter, :group => \"parameter\",\n\t\t\t :conditions => [\"source_id = ? AND object_class = ?\", \n\t\t\t @source.id, element.camelcase])\t\n\t\t\tparamlist.each do |param,cnt|\n\t\t\t\t#all params are ids, chop the last 3\n\t\t\t\tparam_name = param[0 .. param.size - 4]\n\n\t\t\t\t#get the type of association\n\t\t\t\telement_klass = element.camelcase.constantize\n\t\t\t\tassoc = element_klass.reflect_on_association(param_name.to_sym)\n\t\t\t\tassoc ||= element_klass.reflect_on_association(param_name.pluralize.to_sym)\n\t\t\t\traise \"no reflection between \"+element_klass.name+\" and \"+param_name+\" or \"+param_name.pluralize unless assoc\n\n\t\t\t\tparam_class = assoc.class_name\n\t\t\t\tcase assoc.macro\n\t\t\t\t\twhen :has_and_belongs_to_many\n\t\t\t\t\t\tinsert_table = [element_klass.name.tableize, param_class.tableize].sort.join(\"_\")\n\t\t\t\t\t\tresolve_ids_habtm_sql(@source, element_klass, param, param_class, insert_table)\n\t\t\t\t\twhen :belongs_to\n\t\t\t\t\t\tresolve_ids_belongs_to_sql(@source, element_klass, param, param_class)\n\t\t\t\t\twhen :has_many\n\t\t\t\t\t\tif assoc.options.keys.include?(:through)\n\t\t\t\t\t\t\tinsert_table = assoc.options[:through].to_s.tableize\n\t\t\t\t\t\t\tresolve_ids_habtm_sql(@source, element_klass, param, param_class, insert_table)\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\traise \"unexpected association macro type :has_many \"+element_klass.name+\" and \"+param_name\n\t\t\t\t\t\tend\n\t\t\t\t\twhen :has_one\n\t\t\t\t\t\traise \"unexpected association macro type :has_one between \"+element_klass.name+\" and \"+param_name\n\t\t\t\tend #case\n\n\t\t\tend #each param\n\t\tend #each TopElement\n\t\tresolve_ids_activerecord([\"CustomNote\",\"TabulationArea\"])\n\tend",
"def id_column\n IdMethods::ID_COLUMN\n end",
"def all_ids(_context)\n raise NotImplementedError\n end",
"def fetch_reference_ids(table, row)\n attributes = {}\n table.reference_columns.each do |c|\n new_id = nil\n if row[c.name.to_s].is_a?(Array)\n new_id = []\n row[c.name.to_s].each do |old_id|\n new_id << no_sql_connection.get_id_using_pre_mongified_id(c.references.to_s, old_id)\n end\n else\n new_id = no_sql_connection.get_id_using_pre_mongified_id(c.references.to_s, row[c.name.to_s])\n end\n attributes.merge!(c.name => new_id) unless new_id.nil?\n end\n attributes\n end",
"def gemd_ids_for(klass)\n ids = Recommendable.redis.smembers(Recommendable::Helpers::RedisKeyMapper.gemd_set_for(klass, id))\n ids.map!(&:to_i) if [:active_record, :data_mapper, :sequel].include?(Recommendable.config.orm)\n ids\n end",
"def ids_getter(name, metadata)\n ids_method = \"#{name.to_s.singularize}_ids\"\n re_define_method(ids_method) do\n send(name).only(:id).map(&:id)\n end\n self\n end",
"def related_concept_ids(db, *ids)\n ids\n end",
"def related_concept_ids(db, *ids)\n ids = ids.flatten\n other_ids = db[:mappings].where(concept_2_id: ids).where{Sequel.function(:lower, :relationship_id) =~ 'is_a'}.select_map(:concept_1_id)\n other_ids + ids\n end",
"def build_id_constraint(reflection, keys, value, table = nil, bind_param = false)\n table ||= reflection.aliased_table\n value, binds = build_binds_for_constraint(reflection, value, keys.foreign_key) \\\n if bind_param\n\n [reflection.build_id_constraint(table[keys.key], value), binds]\n end",
"def resolve_ids_belongs_to_sql(source, klass, param, param_class)\n\n\t\ttemptable = resolve_ids_temptable_sql(source, klass, param, param_class)\n\n\t\tupdate_statement = \"\n\t\tUPDATE #{klass.name.tableize} SET #{param} = (\n\t\t SELECT param_id FROM #{temptable} WHERE object_id=#{klass.name.tableize}.id)\" \n\t\tupdate_statement += \" WHERE source_id = #{source.id}\" if klass.name != \"Source\"\n\n\t\tsource.connection.execute update_statement\n\n\t\tresolve_ids_cleanup_sql(source, temptable)\n\n\t\treturn true\n\tend",
"def all_ancestor_ids\n ancestors.pluck(:id)\n end",
"def relation_primary_key(relation)\n relation_reflect(relation).association_primary_key\n end",
"def involved_people_ids\n (\n [self.scrum_master_id.to_s, self.product_owner_id.to_s] + self.team_member_ids + self.stakeholder_ids\n ).select {|u_id| !u_id.blank?}\n end",
"def associations_to_ids hash\n to_ids = {}\n hash.each_pair do |key, type|\n key_name = ['references', 'belongs_to'].include?(type.downcase) ? \"#{key}_id\" : key\n to_ids[key_name] = type\n end\n to_ids\n end",
"def id_finder\n @id_finder ||= extract_id ? :all_of : :where\n end",
"def rId\n pivot_table.relationships.for(self).Id\n end",
"def get_id_from_model(model_instance)\n model_instance[foreign_key_column_name]\n end",
"def for_ids(*ids)\n field = klass.fields[\"_id\"]\n ids.flatten!\n if ids.size > 1\n any_in(:_id => ids.map{ |id| field.serialize(id) })\n else\n where(:_id => field.serialize(ids.first))\n end\n end",
"def has_many_relations(ar_instance)\n\t\t\tcolumn_name = \"#{ar_instance.class.name.underscore}_id\"\n\t\t\tdescendents = ActiveRecord::Base.connection.tables\n\t\t\tdescendents.reject!{ |table| false unless table.classify.constantize rescue true }\n\t\t\tdescendents.reject!{ |table| true unless table.classify.constantize.column_names.include?(column_name) }\n\t\tend",
"def construct_id_map_for_composite(records)\n id_to_record_map = {}\n ids = []\n records.each do |record|\n primary_key ||= record.class.primary_key\n ids << record.id\n mapped_records = (id_to_record_map[record.id.to_s] ||= [])\n mapped_records << record\n end\n ids.uniq!\n return id_to_record_map, ids\n end",
"def persons\n Gallerist::Person.where modelId: person_photos.map(&:person_id)\n end",
"def persons\n Gallerist::Person.where modelId: person_photos.map(&:person_id)\n end",
"def ids\n @ids ||= []\n end",
"def has_many_identities(assoc, options={})\r\n fk = options[:foreign_key] || :\"#{model_name.to_s.underscore}_id\"\r\n pk = primary_key\r\n other_model_name = options[:class_name] || assoc.to_s.singularize.camelize\r\n other_model = other_model_name.to_s.constantize\r\n\r\n # all children iterations\r\n has_many :\"#{assoc}_iterations\", class_name: other_model_name, foreign_key: fk\r\n\r\n # current children:\r\n # has_many assoc, options.merge(conditions: [\"#{model.effective_to_column_sql} = :date\", :date=>END_OF_TIME)]\r\n define_method assoc do\r\n send(:\"#{assoc}_iterations\").current\r\n end\r\n # children at some date\r\n define_method :\"#{assoc}_at\" do |date=nil|\r\n # has_many assoc, options.merge(conditions: [%{#{model.effective_from_column_sql}<=:date AND #{model.effective_to_column_sql}>:date}, :date=>model.effective_date(date)]\r\n send(:\"#{assoc}_iterations\").at_date(date)\r\n end\r\n\r\n # children at today\r\n define_method :\"#{assoc}_at_present\" do\r\n send(:\"#{assoc}_iterations\").at_date(Date.today)\r\n end\r\n\r\n # children at today or some date\r\n define_method :\"#{assoc}_at_present_or\" do |date=nil|\r\n if date.nil?\r\n send(:\"#{assoc}_iterations\").at_date(Date.today)\r\n else\r\n send(:\"#{assoc}_iterations\").at_date(date)\r\n end\r\n end\r\n\r\n # children before today\r\n define_method :\"#{assoc}_past\" do\r\n send(:\"#{assoc}_iterations\").before_date(Date.today)\r\n end\r\n\r\n # children after today\r\n define_method :\"#{assoc}_upcoming\" do\r\n send(:\"#{assoc}_iterations\").after_date(Date.today)\r\n end\r\n\r\n # all children identities\r\n define_method :\"#{assoc}_identities\" do\r\n # send(:\"#{assoc}_iterations\").select(\"DISTINCT #{other_model.identity_column_sql}\").order(other_model.identity_column_sql).pluck(:identity)\r\n # other_model.unscoped.where(fk=>send(pk)).identities\r\n send(:\"#{assoc}_iterations\").identities\r\n end\r\n\r\n # children identities at a date\r\n define_method :\"#{assoc}_identities_at\" do |date=nil|\r\n # send(:\"#{assoc}_iterations_at\", date).select(\"DISTINCT #{other_model.identity_column_sql}\").order(other_model.identity_column_sql).pluck(:identity)\r\n # other_model.unscoped.where(fk=>send(pk)).identities_at(date)\r\n send(:\"#{assoc}_iterations\").identities_at(date)\r\n end\r\n\r\n # current children identities\r\n define_method :\"#{assoc}_current_identities\" do\r\n # send(assoc).select(\"DISTINCT #{other_model.identity_column_sql}\").order(other_model.identity_column_sql).pluck(:identity)\r\n # other_model.unscoped.where(fk=>send(pk)).current_identities\r\n send(:\"#{assoc}_iterations\").current_identities\r\n end\r\n\r\n # present children identities\r\n define_method :\"#{assoc}_present_identities\" do\r\n send(:\"#{assoc}_iterations\").present_identities\r\n end\r\n\r\n end",
"def expand_association_to_ids fields\n expanded = {}\n fields.each_pair do |name, type|\n case type\n when 'belongs_to'\n expanded[\"#{name}_id\"] = 'integer'\n end\n end\n fields.merge(expanded)\n end",
"def active_record_has_and_belongs_to_many(model, relation)\n return [] unless relation.macro == :has_and_belongs_to_many\n\n dump_proxy_table(model, relation)\n end",
"def worker_ids_from_model model\n workers_from_model(model).map{ |worker| worker[:id] }\n end",
"def owning_ids\n sql = \"SELECT cim.collection_id, ucm.unit_id, u.institution_id\n FROM collection_item_memberships cim\n LEFT JOIN unit_collection_memberships ucm ON ucm.collection_id = cim.collection_id\n LEFT JOIN units u ON u.id = ucm.unit_id\n WHERE cim.item_id = $1\n ORDER BY cim.primary DESC, ucm.primary DESC;\"\n values = [self.id]\n result = ActiveRecord::Base.connection.exec_query(sql, \"SQL\", values)\n # This will be nil for items not in a collection.\n result[0] ? result[0] : {\n 'collection_id' => nil,\n 'unit_id' => nil,\n 'institution_id' => nil\n }\n end",
"def real_id\n @id\n end",
"def real_id\n @id\n end",
"def get_selected_song_ids\n return get_songs_relation.pluck( 'songs.id' )\n end",
"def active_glm_id_list\n self.active_group_loan_memberships.map{|x| x.id }\n end",
"def map_entity_relations\n assignments = @db_base.query_assignments\n results = Hash.new()\n assignments.each { |result|\n results[result[\"P_Id\"]] = result[\"T_Id\"]\n }\n return results\n end",
"def belongs_to_relations(ar_instance)\n\t\t\tcolumns = ar_instance.class.column_names\n\t\t\tparents = columns.map{ |c| c if c =~ /_id/ }.reject{ |c| c.nil? }\n\t\t\tparents.map!{ |parents| parents.gsub('_id', '') }\n\t\tend",
"def article_ids\n query('SELECT Id FROM KnowledgeArticle').map(&:Id)\n end",
"def pluck_ids(targets)\n targets.pluck(:id) if targets\n end",
"def lookups(database_record)\n ids = {}\n\n for_mappings(database_record) do |mapping, lookup|\n associated = database_record.association(name).reader\n\n ids[lookup] =\n if associated\n # It's possible to define a belongs_to association in a Mapping\n # for what is actually a one-to-many association on the\n # ActiveRecord object.\n Array(associated).first.send(mapping.lookup_column)\n end\n end\n\n ids\n end",
"def laboratorios_id\n laboratorios.all.map { |l| l.id }\n end",
"def model_id\n model.id\n end",
"def self_and_siblings_ids\n parent ? parent.children_ids : self.class.roots.map {|x| x.id}\n end",
"def real_id(model_name, friendly_id)\n if friendly_id.to_s.to_i == 0\n obj = model_name.constantize.find(friendly_id)\n if obj\n return obj.id\n end\n end\n friendly_id\n end",
"def construct_id_map(records, primary_key=nil)\n id_to_record_map = {}\n ids = []\n records.each do |record|\n primary_key ||= record.class.primary_key\n ids << record[primary_key]\n mapped_records = (id_to_record_map[ids.last.to_s] ||= [])\n mapped_records << record\n end\n ids.uniq!\n return id_to_record_map, ids\n end",
"def id\n model.id.to_s\n end",
"def company_id\n e = Employee.find_by(user_id: id)\n e.company_id\n end",
"def prisoner_ids\n @prisoner_ids || prisoners.collect{|p| p.id}\n end",
"def resolve_ids_habtm_sql(source, klass, param, param_class, insert_table)\n\n\t\ttemptable = resolve_ids_temptable_sql(source, klass, param, param_class)\n\n\t\tsource.connection.execute \"\n\t\t\tINSERT INTO #{insert_table} (#{klass.name.underscore + \"_id\"}, #{param}) \n\t\t\t\tSELECT object_id, param_id FROM #{temptable};\"\n\n\t\tresolve_ids_cleanup_sql(source, temptable)\n\n\t\treturn true\n\tend",
"def assignable_people_ids\n ([self.scrum_master_id.to_s] + self.team_member_ids).select {|u_id| !u_id.blank?}\n end",
"def directs\n alias_ids.map(&:e).map(&:company).map(&:id)\n end",
"def related_id_field\n @related_id_field ||= association.klass.fields[\"_id\"]\n end",
"def all\n db.transaction(true) do\n ids = extract_model_ids(db)\n ids.map { |key| db[key] }\n end\n end",
"def ids(table)\n res = connection.query(\"SELECT id FROM #{table} GROUP BY id\")\n id_list = []\n res.each { |i| id_list << i[0].to_i }\n return id_list\n end",
"def ids_writer(ids)\n primary_key = reflection.association_primary_key\n pk_type = klass.type_for_attribute(primary_key)\n ids = Array(ids).compact_blank\n ids.map! { |id| pk_type.cast(id) }\n\n records = if klass.composite_primary_key?\n query_records = ids.map { |values_set| klass.where(primary_key.zip(values_set).to_h) }.inject(&:or)\n\n query_records.index_by do |record|\n primary_key.map { |primary_key| record._read_attribute(primary_key) }\n end\n else\n klass.where(primary_key => ids).index_by do |record|\n record._read_attribute(primary_key)\n end\n end.values_at(*ids).compact\n\n if records.size != ids.size\n found_ids = records.map { |record| record._read_attribute(primary_key) }\n not_found_ids = ids - found_ids\n klass.all.raise_record_not_found_exception!(ids, records.size, ids.size, primary_key, not_found_ids)\n else\n replace(records)\n end\n end",
"def ids(things)\n things.map(&:id).join(\",\")\n end",
"def ancestor_ids\n read_attribute(self.base_class.ancestry_column).to_s.split(%r|[,/]|).uniq.map { |id| cast_primary_key(id) }\n end",
"def id_key_for(model_class)\n get_key('ID_KEY', model_class) + '_id'\n end",
"def collection_to_id_array(col)\n ids = Array.new\n col.each do |i|\n ids << i.id\n end\n ids\n end",
"def find_ids(ids)\n ids.inject([]) { |x, id| x << all.find { |y| y.id == id }}\n end",
"def related_domains\n ids = DomainContact.select(:domain_id).where(contact_id: id).limit(11).map(&:domain_id).uniq\n res = Domain.where(id: ids).or(Domain.where(registrant_id: id)).select(:name, :uuid).limit(11)\n res.pluck(:name, :uuid).map { |name, id| { name: name, id: id } }\n end",
"def reader_ids\n group_user_ids(readers_join_table)\n end",
"def find_persisted_member_ids(resource:)\n connection[find_persisted_member_ids_query, resource.id.to_s].map do |member|\n member[:id]\n end\n end",
"def existing_ids\n return Thread.current[:existing_ids] if Thread.current[:existing_ids]\n\n ids_in = [].tap do |ids|\n transformer.map_column(update_on, records).each_slice(999) do |slice|\n ids << destination_model.arel_table[update_on].in(slice).to_sql\n end\n end \n \n Thread.current[:existing_ids] = Hash[destination_model.where(ids_in.join(\" OR \")).pluck(update_on, :id)]\n end",
"def get_ids(table)\r\n valid_ids = []\r\n table_info = @db.execute(\"SELECT * FROM #{table}\")\r\n table_info.each do |line|\r\n line_info = []\r\n line.each do |name, value|\r\n if name == 'id'\r\n valid_ids << value\r\n end\r\n end\r\n end\r\n valid_ids\r\n end",
"def build_dotted_ids\n self.parent ? \"#{self.parent.dotted_ids}.#{self.id}\" : self.id.to_s\n end",
"def driver_ids\n\t\tdrivers.map{|d| d.id.to_s + \"_\" + self.id.to_s}\n\tend",
"def company_id\n if @company.respond_to?(:to_hash)\n @company.fetch(:company_id) { @company }\n elsif @company.respond_to?(:company_id)\n @company.company_id\n else\n @company\n end\n end",
"def associated_records(ids)\n max_ids_in_a_list = connection.ids_in_list_limit || ids.size\n records = []\n ids.each_slice(max_ids_in_a_list) do |some_ids|\n records += yield(some_ids)\n end\n records\n end",
"def model_id(model)\n \"#{model.class.name}_#{model.id}\"\n end",
"def model_id(model)\n \"#{model.class.name}_#{model.id}\"\n end",
"def orchestrate_primary_key\n id\n end",
"def has_many(name, klass, field_name: \"#{name.to_s.chop}_ids\")\n field(field_name, :array)\n relations << name\n define_method(name) { send(field_name).map { |id| klass.new(id) } }\n end",
"def superfamily_ids\n superfamily_ids_set.to_a\n end",
"def category_ids\n self.associated_categories.collect{ |c| c.id }\n end",
"def involved_people\n involved_people_ids.collect{|user_id| User.find(user_id)}\n end",
"def ids(*values)\n values.inject(self) { |res, val| res._ids(val) }\n end"
] | [
"0.63226235",
"0.6315557",
"0.61727315",
"0.60769916",
"0.6016357",
"0.5820043",
"0.5757177",
"0.57131624",
"0.557167",
"0.55570906",
"0.5546322",
"0.5545649",
"0.5503937",
"0.54966134",
"0.54735273",
"0.5456828",
"0.54509854",
"0.54426056",
"0.54343647",
"0.5417242",
"0.5417242",
"0.5413907",
"0.54125226",
"0.5385669",
"0.53558624",
"0.5332815",
"0.52971333",
"0.52827543",
"0.5281784",
"0.528005",
"0.5268121",
"0.5229694",
"0.5228203",
"0.52051336",
"0.51797026",
"0.5174662",
"0.5164981",
"0.5157014",
"0.51447093",
"0.5140685",
"0.5130038",
"0.5119313",
"0.5118393",
"0.5112714",
"0.51049066",
"0.509678",
"0.5092305",
"0.5092305",
"0.50886506",
"0.50841635",
"0.508151",
"0.50796074",
"0.5078888",
"0.5078448",
"0.50757194",
"0.50757194",
"0.5074377",
"0.50723004",
"0.5072034",
"0.5064405",
"0.50638247",
"0.50603086",
"0.5059632",
"0.5054824",
"0.5052908",
"0.5052899",
"0.50457555",
"0.50444067",
"0.5038834",
"0.503269",
"0.5031887",
"0.5022314",
"0.500516",
"0.50039285",
"0.4995455",
"0.49890745",
"0.49833274",
"0.4981116",
"0.49805874",
"0.49746716",
"0.49732232",
"0.49709055",
"0.49661282",
"0.4943679",
"0.49284786",
"0.49283004",
"0.4926636",
"0.49246743",
"0.4924474",
"0.49243426",
"0.4907651",
"0.4907564",
"0.49009883",
"0.49009883",
"0.49009258",
"0.49007586",
"0.49001536",
"0.4895565",
"0.4889013",
"0.48868772"
] | 0.614466 | 3 |
Same as ids but perform the query asynchronously and returns an ActiveRecord::Promise | def async_ids
async.ids
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def async_lookup(args)\n unless (Hash === args)\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all do |rows|\n yield(rows.any? ? rows.first : nil)\n end\n\n return\n end",
"def ids\n primary_key_array = Array(primary_key)\n\n if loaded?\n result = records.map do |record|\n if primary_key_array.one?\n record._read_attribute(primary_key_array.first)\n else\n primary_key_array.map { |column| record._read_attribute(column) }\n end\n end\n return @async ? Promise::Complete.new(result) : result\n end\n\n if has_include?(primary_key)\n relation = apply_join_dependency.group(*primary_key_array)\n return relation.ids\n end\n\n columns = arel_columns(primary_key_array)\n relation = spawn\n relation.select_values = columns\n\n result = if relation.where_clause.contradiction?\n ActiveRecord::Result.empty\n else\n skip_query_cache_if_necessary do\n klass.connection.select_all(relation, \"#{klass.name} Ids\", async: @async)\n end\n end\n\n result.then { |result| type_cast_pluck_values(result, columns) }\n end",
"def await_results(promises)\n ResultSet.new(promises.map(&:value))\n end",
"def parallel_queries\n raise \"Model is missing id column\" if ar_class.columns.none? { |column| column.name == \"id\" }\n\n if ar_class.respond_to?(:scrubbable_scope)\n num_records = ar_class.send(:scrubbable_scope).count\n else\n num_records = ar_class.count\n end\n return [] if num_records == 0 # no records to import\n\n record_window_size, modulus = num_records.divmod(num_of_batches)\n if record_window_size < 1\n record_window_size = 1\n modulus = 0\n end\n\n start_id = next_id(ar_class: ar_class, offset: 0)\n queries = num_of_batches.times.each_with_object([]) do |_, queries|\n next unless start_id\n\n end_id = next_id(ar_class: ar_class, id: start_id, offset: record_window_size - 1)\n if modulus > 0\n end_id = next_id(ar_class: ar_class, id: end_id)\n modulus -= 1\n end\n queries << { id: start_id..end_id } if end_id\n start_id = next_id(ar_class: ar_class, id: end_id || start_id)\n end\n\n # just in case new records are added since we started, extend the end ID\n queries[-1] = [\"#{ar_class.quoted_table_name}.id >= ?\", queries[-1][:id].begin] if queries.any?\n\n queries\n end",
"def fetch_records(ids)\n model.where(id: ids)\n end",
"def fetch_and_enqueue(force_all_syncs=false)\n begin\n if force_all_syncs\n query = db.fetch(%Q(\n SELECT r.name, r.id FROM #{relation} r, users u WHERE\n (r.state = '#{CartoDB::Synchronization::Member::STATE_SUCCESS}'\n OR r.state = '#{CartoDB::Synchronization::Member::STATE_SYNCING}')\n AND u.id = user_id AND u.state = '#{Carto::User::STATE_ACTIVE}'\n ))\n else\n query = db.fetch(%Q(\n SELECT r.name, r.id, r.user_id FROM #{relation} r, users u\n WHERE EXTRACT(EPOCH FROM r.run_at) < #{Time.now.utc.to_f}\n AND u.id = user_id AND u.state = '#{Carto::User::STATE_ACTIVE}'\n AND\n (\n r.state = '#{CartoDB::Synchronization::Member::STATE_SUCCESS}'\n OR (r.state = '#{CartoDB::Synchronization::Member::STATE_FAILURE}'\n AND r.retried_times < #{CartoDB::Synchronization::Member::MAX_RETRIES})\n )\n ORDER BY ran_at\n ))\n end\n success = true\n rescue Exception => e\n success = false\n print_log(\"ERROR fetching sync tables: #{e.message}, #{e.backtrace}\", true)\n end\n\n if success\n print_log \"Fetched #{query.count} records\"\n force_all_syncs ? enqueue_all(query) : enqueue_rate_limited(query)\n end\n\n self\n end",
"def promise_find(id)\n sid = id.to_s\n record_in_progress = if _record_cache.has_key?(sid)\n _record_cache[sid]\n else\n self.new(id: id)\n end\n _promise_find(id, record_in_progress)\n end",
"def execute\n # First, execute the SQL, applying the valid after_filters\n ret = apply_after_filters(execute_sql)\n\n # Set changed property to true\n changed\n\n # Notify all observers of the ids of the current result\n # set\n notify_observers(\n ret.collect{|instance| instance.send(model.primary_key)},\n self\n )\n\n # Reset the Query\n reset!\n\n # Return the results\n ret\n end",
"def async_result()\n #This is a stub, used for indexing\n end",
"def run(&block)\n @repository.query(self, &block)\n end",
"def results\n @scope.where(@scope.primary_key => @ids).to_a\n end",
"def fetch_multi(*ids)\n ensure_base_model\n raise_if_scoped\n raise NotImplementedError, \"fetching needs the primary index enabled\" unless primary_cache_index_enabled\n options = ids.extract_options!\n ids.flatten!(1)\n records = if IdentityCache.should_use_cache?\n require_if_necessary do\n cache_keys = ids.map {|id| rails_cache_key(id) }\n key_to_id_map = Hash[ cache_keys.zip(ids) ]\n key_to_record_map = {}\n\n coders_by_key = IdentityCache.fetch_multi(cache_keys) do |unresolved_keys|\n ids = unresolved_keys.map {|key| key_to_id_map[key] }\n records = find_batch(ids)\n key_to_record_map = records.compact.index_by{ |record| rails_cache_key(record.id) }\n records.map {|record| coder_from_record(record) }\n end\n\n cache_keys.map{ |key| key_to_record_map[key] || record_from_coder(coders_by_key[key]) }\n end\n else\n find_batch(ids)\n end\n records.compact!\n prefetch_associations(options[:includes], records) if options[:includes]\n records\n end",
"def to_a\n fetch(ids)\n end",
"def to_a\n fetch(ids)\n end",
"def fetch(ids)\n data = nil\n\n model.synchronize do\n ids.each do |id|\n redis.queue(\"HGETALL\", namespace[id])\n end\n\n data = redis.commit\n end\n\n return [] if data.nil?\n\n [].tap do |result|\n data.each_with_index do |atts, idx|\n result << model.new(Utils.dict(atts).update(:id => ids[idx]))\n end\n end\n end",
"def query(sql, name = nil) #:nodoc:\n #log(sql, name) do\n #TODO: @async\n select_rows sql, name\n end",
"def async_pick(*column_names)\n async.pick(*column_names)\n end",
"def query(&block)\n @delegate.query(block)\n end",
"def query(rows)\n join_rows = fetch_join_rows(rows)\n assoc_ids = join_rows.map { |row| row[1] }.compact.uniq\n yield assoc_ids.any? ? base_scope.where(@ref.association_primary_key => assoc_ids) : nil, join_rows\n end",
"def retrieve_tasks\n Query.get_data(@values[:id], @values[:start_time], @values[:end_time])\n end",
"def retrieve_tasks\n Query.get_data(@values[:id], @values[:start_time], @values[:end_time])\n end",
"def retrieve_tasks\n Query.get_data(@values[:id], @values[:start_time], @values[:end_time])\n end",
"def query(&block)\n items = assert_connected(table).query(&block)\n results = []\n items.each { |i| results << new(i) }\n results\n end",
"def execute_sql\n # add conditions including the cache_ids and retrieve a count and all of the records\n return @model.find(:all,to_active_record)\n end",
"def execute\n # build the query string\n # run the query\n # return the results\n end",
"def query(&blk)\n @adapter.query(collection, self, &blk)\n end",
"def collection_query_method(name, options = { default_result: []})\n # @!method promise_[name]\n # @return [Promise] on success the .then block will receive the result of the RPC call as arg\n # on failure the .fail block will receive the HTTP response object as arg\n define_method(\"promise_#{name}\") do |*args|\n name_args = self.class._name_args(name, *args)\n @fetch_states[name_args] = 'i'\n unless @rest_methods.has_key?(name)\n @rest_methods[name] = {}.merge!(options)\n @update_on_link[name_args] = {}\n end\n @rest_methods[name_args] = { result: options[:default_result] } unless @rest_methods.has_key?(name_args) && @rest_methods[name_args].has_key?(:result)\n raise \"#{self.class.to_s}[_no_id_].#{name}, can't execute instance collection_query_method without id!\" unless self.id\n self.class._promise_get_or_patch(\"#{resource_base_uri}/#{self.id}/methods/#{name}.json?timestamp=#{`Date.now() + Math.random()`}\", *args).then do |response_json|\n collection = self.class._convert_array_to_collection(response_json[:result], self)\n @rest_methods[name_args][:result] = collection\n @fetch_states[name_args] = 'f'\n _notify_observers\n @rest_methods[name_args][:result]\n end.fail do |response|\n error_message = \"#{self.class.to_s}[#{self.id}].#{name}, a collection_query_method, failed to execute!\"\n `console.error(error_message)`\n response\n end\n end\n # @!method [name]\n # @return result either the default_result ass specified in the options or the real result if the RPC call already finished\n define_method(name) do |*args|\n _register_observer\n name_args = self.class._name_args(name, *args)\n unless @rest_methods.has_key?(name)\n @rest_methods[name] = {}.merge!(options)\n @update_on_link[name_args] = {}\n end\n @rest_methods[name_args] = { result: options[:default_result] } unless @rest_methods.has_key?(name_args) && @rest_methods[name_args].has_key?(:result)\n unless @fetch_states.has_key?(name_args) && 'fi'.include?(@fetch_states[name_args])\n self.send(\"promise_#{name}\", *args)\n end\n @rest_methods[name_args][:result]\n end\n # @!method update_[name] mark internal structures so that the method is called again once it is requested again\n # @return nil\n define_method(\"update_#{name}\") do |*args|\n @fetch_states[self.class._name_args(name, *args)] = 'u'\n nil\n end\n end",
"def execute(async)\n f = Fiber.current\n begin\n conn = acquire(f)\n conn.acquired_for_connection_pool += 1\n yield conn\n ensure\n conn.acquired_for_connection_pool -= 1\n conn.run_postponed_queries if conn.acquired_for_connection_pool == 0\n release(f) if !async && conn.acquired_for_connection_pool == 0\n end\n end",
"def query_result_set(query)\n\t result = ValueSet.new\n\t call(:query_result_set, query) do |marshalled_set|\n\t\tfor task in marshalled_set\n\t\t task = local_object(task)\n\t\t Distributed.keep.ref(task)\n\t\t result << task\n\t\tend\n\t end\n\n\t result\n\tend",
"def query(sql, *bind_values)\n @pool.acquire { |conn| conn.query(sql, *bind_values).first }\n end",
"def query_wait sql, waiting_time = 10\n result = db[sql].all\n if result.empty?\n if waiting_time != 0\n sleep 1\n result = query_wait(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def execute_query(sql, args)\n @db.log_connection_yield(sql, self, args){args ? async_exec(sql, args) : async_exec(sql)}\n end",
"def getDBArray(connection, query, id1, *id2)\r\n dbi_query = connection.prepare(query)\r\n dbi_query.execute(id1, *id2)\r\n #fetch the result\r\n return dbi_query.fetch_all\r\nend",
"def async_pluck(*column_names)\n async.pluck(*column_names)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def multi_query(sql, *bind_values)\n @pool.acquire { |conn| conn.query(sql, *bind_values) }\n end",
"def synchronize_resultset; end",
"def sync\n raise \".sync can only be used on the client\" if Volt.client?\n\n result = nil\n error = nil\n\n self.then do |val|\n result = val\n end.fail do |err|\n error = err\n end\n\n if error\n err_str = \"Exception in Promise at .sync: #{error.inspect}\"\n err_str += error.backtrace.join(\"\\n\")\n Volt.logger.error(err_str)\n fail error\n else\n return result\n end\n end",
"def get_actor_info(id)\n query = %Q{\n SELECT actors.id, actors.name, movies.title, movies.id AS movie_id, cast_members.character FROM actors\n JOIN cast_members ON cast_members.actor_id = actors.id\n JOIN movies ON movies.id = cast_members.movie_id\n WHERE actors.id = $1\n }\n\n actor_info = db_connection do |conn|\n conn.exec_params(query, [id])\n end\n\n actor_info.to_a\nend",
"def all(*promises)\n Q.all(@loop, *promises)\n end",
"def do_id_query(ids)\n responses = []\n ids.each do |k, v|\n new_key_value = map_key(k, v)\n next if new_key_value.blank? #we probably have bad ISBN, could be bad key though\n responses << get_thing(new_key_value)\n end\n selected = responses.map { |r| r['result'] }.flatten.compact.uniq\n return selected\n end",
"def friend_ids(query={})\n perform_get(\"/friends/ids.json\", :query => query)\nend",
"def promise_all\n _class_fetch_states[:all] = 'i'\n _promise_get(\"#{resource_base_uri}.json?timestamp=#{`Date.now() + Math.random()`}\").then do |response|\n collection = _convert_array_to_collection(response.json[self.to_s.underscore.pluralize])\n _class_fetch_states[:all] = 'f'\n _notify_class_observers\n warn_message = \"#{self.to_s}.all has been called. This may potentially load a lot of data and cause memory and performance problems.\"\n `console.warn(warn_message)`\n collection\n end.fail do |response|\n error_message = \"#{self.to_s}.all failed to fetch records!\"\n `console.error(error_message)`\n response\n end\n end",
"def album #this is a helper method\n sql = \"\n SELECT * FROM albums\n WHERE artist_id = $1\"\n values = [@id]\n albums_info = SqlRunner.run(sql, values)\n all_their_albums = albums_info.map { |album| Album.new(album) }\n return all_their_albums\n end",
"def fetch_multi(*ids)\n options = ids.extract_options!\n if IdentityCache.should_cache?\n\n require_if_necessary do\n cache_keys = ids.map {|id| rails_cache_key(id) }\n key_to_id_map = Hash[ cache_keys.zip(ids) ]\n\n objects_by_key = IdentityCache.fetch_multi(*key_to_id_map.keys) do |unresolved_keys|\n ids = unresolved_keys.map {|key| key_to_id_map[key] }\n records = find_batch(ids, options)\n records.compact.each(&:populate_association_caches)\n records\n end\n\n cache_keys.map {|key| objects_by_key[key] }.compact\n end\n\n else\n find_batch(ids, options)\n end\n end",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def get_ids_from(table_name)\n DB::Queries.get_ids_from(table: table_name)\nend",
"def fetch(sql, *params)\n rs = self.execute(sql, *params)\n self.execute(\"flush privileges\") # Always flush in case user wants to alter users\n return [] if self.interpreter.preview? && ! rs\n return rs.fetch_all rescue nil\n end",
"def search_results_from_ids(ids)\n where(:id => ids).preload(searchable_options[:preload]).to_a\n end",
"def query_single(sql, *params)\n results = run(sql, *params)\n results.each(as: :array, :first => true).first\n end",
"def player_ids(league_id)\n database do |db|\n # return id\n return db.execute('SELECT PlayerID FROM Player\n WHERE LeagueID = :league_id\n ORDER BY PlayerID',\n league_id).flatten\n end\nend",
"def query_empty sql, waiting_time = 10\n result = db[sql].all\n if !result.empty?\n if waiting_time != 0\n sleep 1\n result = query(sql, waiting_time - 1)\n end\n end\n return result\n end",
"def associated_records(ids)\n max_ids_in_a_list = connection.ids_in_list_limit || ids.size\n records = []\n ids.each_slice(max_ids_in_a_list) do |some_ids|\n records += yield(some_ids)\n end\n records\n end",
"def run\n Concurrent::Promise.zip(*@promises).value!\n end",
"def getDBValue(connection, query, id1, *id2)\r\n dbi_query = connection.prepare(query)\r\n dbi_query.execute(id1, *id2)\r\n #fetch the result\r\n return dbi_query.fetch\r\nend",
"def all_ids\n db.transaction(true) do |db|\n extract_model_ids(db)\n end\n end",
"def execute_query(query)\n ActiveRecord::Base.connection.select_all(query)\n end",
"def execute\n klass.collection.find(selector, process_options) || []\n end",
"def query(sql)\n if NB.neverblocking? && NB.reactor.running?\n send_query sql\n NB.wait(:read, IO.new(socket))\n get_result\n else\n super(sql)\n end\n end",
"def query_async sparql\n @executor.post do\n logger.debug('SPARQL Query', endpoint: @endpoint_url, sparql:)\n yield [nil, query(sparql)]\n rescue StandardError => e\n yield [e, nil]\n end\n end",
"def query_show_ids(&block)\n @source.query_with_pagination(@queries[:shows], @queries[:count_shows], &block)\n end",
"def get_all_actors\n query = %Q{\n SELECT * FROM actors\n ORDER BY name\n }\n\n results = db_connection do |conn|\n conn.exec(query)\n end\n\n results.to_a\nend",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def get_object_contents_ids params\n sequel_db = get_db params[:database]\n sequel_db.transaction do\n sequel_db[:objects].where(:location_object_id => params[:persistence_id]).select_map(:id)\n end\n end",
"def fetch_rows(sql, opts=OPTS, &block)\n db.execute(sql){|result| process_result_set(result, opts, &block)}\n self\n end",
"def existing_ids\n return Thread.current[:existing_ids] if Thread.current[:existing_ids]\n\n ids_in = [].tap do |ids|\n transformer.map_column(update_on, records).each_slice(999) do |slice|\n ids << destination_model.arel_table[update_on].in(slice).to_sql\n end\n end \n \n Thread.current[:existing_ids] = Hash[destination_model.where(ids_in.join(\" OR \")).pluck(update_on, :id)]\n end",
"def get_ids(params)\n if params[:ids]\n type = [\"doi\", \"pmid\", \"pmcid\", \"arxiv\", \"wos\", \"scp\", \"ark\", \"url\"].find { |t| t == params[:type] } || \"pid\"\n type = \"canonical_url\" if type == \"url\"\n ids = params[:ids].nil? ? nil : params[:ids].split(\",\").map { |id| get_clean_id(id) }\n collection = Work.where(works: { type => ids })\n elsif params[:q]\n collection = Work.query(params[:q])\n elsif params[:publisher_id] && publisher = cached_publisher(params[:publisher_id])\n collection = Work.where(publisher_id: publisher.id)\n elsif params[:contributor_id] && contributor = Contributor.where(pid: params[:contributor_id]).first\n collection = Work.joins(:contributions).where(\"contributions.contributor_id = ?\", contributor.id)\n elsif params[:id]\n id_hash = get_id_hash(params[:id])\n if id_hash.present?\n key, value = id_hash.first\n collection = Work.where(key => value)\n else\n collection = Work.none\n end\n else\n collection = Work.tracked\n end\n\n if params[:source_id] && source = cached_source(params[:source_id])\n collection = collection.joins(:results)\n .where(\"results.source_id = ?\", source.id)\n .where(\"results.total > 0\")\n end\n\n if params[:relation_type_id] && relation_type = cached_relation_type(params[:relation_type_id])\n collection = collection.joins(:relations)\n .where(\"relations.relation_type_id = ?\", relation_type.id)\n end\n\n if params[:registration_agency_id] && registration_agency = cached_registration_agency(params[:registration_agency_id])\n collection = collection.where(registration_agency_id: registration_agency.id)\n end\n\n collection\n end",
"def call_async(method, *args)\n require 'celluloid'\n return Celluloid::Future.new{ send_request(method, args) }\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def comments\n db_connection do |conn|\n conn.exec(\"SELECT * FROM comments\").to_a\n end\nend",
"def batch_query\n render nothing: true\n\n # logger.info \"params: \" + params.inspect\n #\n # endpoints_all = Endpoint.all\n # logger.info \"List of all endpoints:\"\n # endpoints_all.each do |endpoint|\n # logger.info ' name: ' + endpoint[:name] + ', url: ' + endpoint[:base_url]\n # end\n\n # Select endpoints using array of endpoint names;\n # Unfortunately, they are not necessarily unique\n endpoint_names = params[:endpoint_names]\n logger.info 'param endpoint_names:' + endpoint_names.inspect\n selected_endpoints = []\n if endpoint_names\n parse_array(endpoint_names).each do |endpoint_name|\n match_ep = Endpoint.find_by_name(endpoint_name)\n if match_ep\n logger.info endpoint_name.to_s + ' matches: ' + match_ep[:name].inspect\n selected_endpoints.push(match_ep)\n else\n logger.info 'WARNING: ' + endpoint_name.to_s + ' has no match!'\n end\n end\n end\n # logger.info 'selected endpoings: ' + selected_endpoints.inspect\n\n\n # users = User.all\n # users.each do |user|\n # logger.info 'username: ' + user[:username]\n # end\n\n # queries_all = Query.all\n # logger.info \"List of all queries:\"\n # queries_all.each do |query|\n # logger.info ' title: ' + query[:title] + ', desc: ' + query[:description]\n # end\n\n # Select query using array of query descriptions;\n # Unfortunately, they are not necessarily unique\n #query_titles = params[:query_titles]\n username = params[:username]\n current_user = User.find_by_username(username)\n if current_user\n query_descriptions = params[:query_descriptions]\n # logger.info 'param query_descriptions:' + query_descriptions.inspect\n selected_queries = []\n if query_descriptions\n parse_array(query_descriptions).each do |query_desc|\n match_query = current_user.queries.find_by_description(query_desc)\n if match_query\n logger.info query_desc + ' matches: ' + match_query[:description].inspect\n selected_queries.push(match_query)\n else\n logger.info 'WARNING: ' + query_desc + ' has no match!'\n end\n end\n end\n end\n # logger.info 'selected queries: ' + selected_queries.inspect\n\n if selected_endpoints && !selected_endpoints.empty? &&\n selected_queries && !selected_queries.empty?\n notify = params[:notification]\n selected_queries.each do |eachQuery|\n #Parallel.each(selected_queries, :in_threads=>15) do |eachQuery|\n # execute the query, and pass in the endpoints and if the user should be notified by email when execution completes\n # logger.info 'title: ' + eachQuery[:title].inspect\n # logger.info 'desc: ' + eachQuery[:description].inspect\n # logger.info 'user_id: ' + eachQuery[:user_id].inspect\n eachQuery.execute(selected_endpoints, notify)\n end\n else\n flash[:alert] = 'Cannot execute a query if no endpoints are provided.'\n end\n end",
"def all_by_artist()\n\n sql = \"\n SELECT * FROM albums\n WHERE artist_id = $1;\n \"\n\n album_hashes = SqlRunner.run(sql, [@id])\n album_list = album_hashes.map {|album_hash| Album.new(album_hash)}\n return album_list\n\nend",
"def get_ids_for_query\n if param.field.options[:definition]\n definition = param.field.options[:definition]\n else\n # Set up a definition\n definition = Definition.new\n definition.base = param.field.options[:base].is_a?(Proc) ? param.field.options[:base].call : param.field.options[:base]\n\n # Get the fields which we should search for\n fields = @field.is_a?(Array) ? @field : [@field]\n fields.each do |field|\n definition.fields << DefinitionField.new(field, :condition => Local, :value_transmogrification => param.field.options[:value_transmogrification])\n end\n end\n\n # Set up a query\n query = Query.new(definition)\n\n # Add all the fields\n query.group(:any) do |params|\n fields.each do |field|\n params << query.param(field, @operator, @value)\n end\n end\n\n ids = query.results.pluck(param.field.options[:foreign_key])\n\n if @operator == :blank\n all_ids = param.query.definition.base.pluck(:id)\n present_ids = definition.base.pluck(param.field.options[:foreign_key])\n ids = (all_ids - present_ids) + ids\n end\n\n ids\n\n end",
"def eager_load_results(eo, &block)\n rows = eo[:rows]\n initialize_association_cache(rows) unless eo[:initialize_rows] == false\n if eo[:id_map]\n ids = eo[:id_map].keys\n return ids if ids.empty?\n end\n strategy = eager_limit_strategy\n cascade = eo[:associations]\n eager_limit = nil\n\n if eo[:eager_block] || eo[:loader] == false\n ds = eager_loading_dataset(eo)\n\n strategy = ds.opts[:eager_limit_strategy] || strategy\n\n eager_limit =\n if el = ds.opts[:eager_limit]\n raise Error, \"The :eager_limit dataset option is not supported for associations returning a single record\" unless returns_array?\n strategy ||= true_eager_graph_limit_strategy\n if el.is_a?(Array)\n el\n else\n [el, nil]\n end\n else\n limit_and_offset\n end\n\n strategy = true_eager_graph_limit_strategy if strategy == :union\n # Correlated subqueries are not supported for regular eager loading\n strategy = :ruby if strategy == :correlated_subquery\n strategy = nil if strategy == :ruby && assign_singular?\n objects = apply_eager_limit_strategy(ds, strategy, eager_limit).all\n elsif strategy == :union\n objects = []\n ds = associated_dataset\n loader = union_eager_loader\n joiner = \" UNION ALL \"\n ids.each_slice(subqueries_per_union).each do |slice|\n objects.concat(ds.with_sql(slice.map{|k| loader.sql(*k)}.join(joiner)).to_a)\n end\n ds = ds.eager(cascade) if cascade\n ds.send(:post_load, objects)\n else\n loader = placeholder_eager_loader\n loader = loader.with_dataset{|dataset| dataset.eager(cascade)} if cascade\n objects = loader.all(ids)\n end\n\n objects.each(&block)\n if strategy == :ruby\n apply_ruby_eager_limit_strategy(rows, eager_limit || limit_and_offset)\n end\n end",
"def self_query\n self.class.where(id: id)\n end",
"def query(query, values)\n handle.exec(query, values)\n end",
"def find_all_by_id(id)\n find_all_by(:id, id)\n end",
"def query sql\n result = db[sql].all\n return result\n end",
"def execute_query(sql, args)\n\t\t\t\t\t@db.log_connection_yield(sql, self, args){args ? self.async_exec(sql, args) : self.async_exec(sql)}\n\t\t\t\tend",
"def fetch_pending(min_id, per_page, options = {})\n options = {\n :limit => per_page,\n }.merge(options)\n video_ids = self.pending_video_ids.rangebyscore(min_id || \"-INF\", \"INF\", options)\n video_ids.map {|id| Video.find_by_id(id) }\n end",
"def query(sql)\n if Fiber.respond_to? :current and Fiber.current[:neverblock]\t\t \n send_query sql\n @fiber = Fiber.current\t\t \n Fiber.yield \n else\t\t \n super(sql)\n end\t\t\n end",
"def run_query(q)\n return sky_table.query(q)\n end",
"def find(*args)\n return super if block_given?\n find_with_ids(*args)\n end",
"def async(method, *args)\n Resque.enqueue(self.class, id, method, *args)\n end",
"def perform_query\n Rails.logger.info queries.to_sql\n queries\n end",
"def find(*parse_ids, type: :parallel, compact: true)\n # flatten the list of Object ids.\n parse_ids.flatten!\n parse_ids.compact!\n # determines if the result back to the call site is an array or a single result\n as_array = parse_ids.count > 1\n results = []\n\n if type == :batch\n # use a .in query with the given id as a list\n results = self.class.all(:id.in => parse_ids)\n else\n # use Parallel to make multiple threaded requests for finding these objects.\n # The benefit of using this as default is that each request goes to a specific URL\n # which is better than Query request (table scan). This in turn allows for caching of\n # individual objects.\n results = parse_ids.threaded_map do |parse_id|\n next nil unless parse_id.present?\n response = client.fetch_object(parse_class, parse_id)\n next nil if response.error?\n Parse::Object.build response.result, parse_class\n end\n end\n # removes any nil items in the array\n results.compact! if compact\n\n as_array ? results : results.first\n end",
"def results( id )\n if @db != nil\n\n end\n\n return nil\n end",
"def execute_find(raw = false)\n get_records\n cached = AridCache::CacheProxy::Result.new\n\n if !records.is_a?(Enumerable) || (!records.empty? && !records.first.is_a?(::ActiveRecord::Base))\n cached = records # some base type, cache it as itself\n else\n cached.ids = records.collect(&:id)\n cached.count = records.size\n if records.respond_to?(:proxy_reflection) # association proxy\n cached.klass = records.proxy_reflection.klass\n elsif !records.empty?\n cached.klass = records.first.class\n else\n cached.klass = object_base_class\n end\n end\n Rails.cache.write(cache_key, cached, opts_for_cache)\n self.cached = cached\n\n # Return the raw result?\n return self.cached if raw\n\n # An order has been specified. We have to go to the database\n # to order because we can't be sure that the current order is the same as the cache.\n if cached.is_a?(AridCache::CacheProxy::Result) && combined_options.include?(:order)\n self.klass = self.cached.klass # TODO used by fetch_from_cache needs refactor\n fetch_from_cache\n else\n process_result_in_memory(records)\n end\n end",
"def contacts\n Person.find_by_sql(contact_query(\"people.id, people.created_at\"))\n end",
"def perform(&run_interp)\n @queries.each {|q| q.perform(&run_interp)}\n end",
"def find_ids_with_ferret(q, options = {}, &block)\n aaf_index.find_ids(q, options, &block)\n end",
"def run_eager\n root_operation = query.selected_operation\n root_op_type = root_operation.operation_type || \"query\"\n root_type = schema.root_type_for_operation(root_op_type)\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = @response\n runtime_object = root_type.wrap(query.root_value, context)\n runtime_object = schema.sync_lazy(runtime_object)\n\n if runtime_object.nil?\n # Root .authorized? returned false.\n @response = nil\n else\n call_method_on_directives(:resolve, runtime_object, root_operation.directives) do # execute query level directives\n gathered_selections = gather_selections(runtime_object, root_type, root_operation.selections)\n # This is kind of a hack -- `gathered_selections` is an Array if any of the selections\n # require isolation during execution (because of runtime directives). In that case,\n # make a new, isolated result hash for writing the result into. (That isolated response\n # is eventually merged back into the main response)\n #\n # Otherwise, `gathered_selections` is a hash of selections which can be\n # directly evaluated and the results can be written right into the main response hash.\n tap_or_each(gathered_selections) do |selections, is_selection_array|\n if is_selection_array\n selection_response = GraphQLResultHash.new(nil, nil, false)\n final_response = @response\n else\n selection_response = @response\n final_response = nil\n end\n\n @dataloader.append_job {\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = selection_response\n # This is a less-frequent case; use a fast check since it's often not there.\n if (directives = selections[:graphql_directives])\n selections.delete(:graphql_directives)\n end\n call_method_on_directives(:resolve, runtime_object, directives) do\n evaluate_selections(\n runtime_object,\n root_type,\n root_op_type == \"mutation\",\n selections,\n selection_response,\n final_response,\n nil,\n )\n end\n }\n end\n end\n end\n delete_all_interpreter_context\n nil\n end",
"def all\n db.transaction(true) do\n ids = extract_model_ids(db)\n ids.map { |key| db[key] }\n end\n end",
"def all\n\t\tquery.execute\n end",
"def for_ids(*ids)\n field = klass.fields[\"_id\"]\n ids.flatten!\n if ids.size > 1\n any_in(:_id => ids.map{ |id| field.serialize(id) })\n else\n where(:_id => field.serialize(ids.first))\n end\n end",
"def index\n @project_promises = ProjectPromise.all\n end"
] | [
"0.6503299",
"0.64135385",
"0.5989049",
"0.59195966",
"0.59133464",
"0.56772715",
"0.56306785",
"0.5587754",
"0.5564351",
"0.5492134",
"0.54685205",
"0.5431442",
"0.5407983",
"0.5400428",
"0.5400428",
"0.5363246",
"0.5344006",
"0.5342844",
"0.53241116",
"0.53099006",
"0.53059727",
"0.53059727",
"0.53059727",
"0.5293713",
"0.5287213",
"0.5258211",
"0.52447385",
"0.52417904",
"0.5204503",
"0.51949733",
"0.51850456",
"0.51828104",
"0.5181104",
"0.5178621",
"0.5178392",
"0.5173602",
"0.5173602",
"0.5173602",
"0.51636297",
"0.5158966",
"0.5124977",
"0.5116962",
"0.51120466",
"0.510925",
"0.5104923",
"0.5095252",
"0.5094025",
"0.50933385",
"0.5084741",
"0.5076688",
"0.5073189",
"0.5072071",
"0.50612646",
"0.5058287",
"0.5052144",
"0.5047705",
"0.50426096",
"0.50378615",
"0.5020025",
"0.5017659",
"0.5015772",
"0.50064987",
"0.4994609",
"0.49937916",
"0.49920282",
"0.49853432",
"0.49824095",
"0.49767223",
"0.49728358",
"0.49615484",
"0.49577975",
"0.49510485",
"0.49510485",
"0.49481595",
"0.4947645",
"0.49461073",
"0.49374107",
"0.49347872",
"0.49186563",
"0.49118882",
"0.49053794",
"0.48997933",
"0.48985258",
"0.4897368",
"0.4886052",
"0.4881143",
"0.48811427",
"0.486258",
"0.4859896",
"0.48576188",
"0.4857202",
"0.48501813",
"0.48487765",
"0.48468956",
"0.48442206",
"0.48370212",
"0.48370108",
"0.48328763",
"0.48308372",
"0.48298046"
] | 0.61593217 | 2 |
Generate a SOAP message fragment for the object. | def soapify_for(msg, label = 'customFieldValues')
msg.add label do |submsg|
submsg.add 'customfieldId', @id
submsg.add 'key', @key
submsg.add_simple_array 'values', @values
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def to_s\n @soap\n end",
"def request_message\n return if @parts.empty?\n\n @request_message = Part.new do\n content_type 'multipart/related; type=\"text/xml\"'\n end\n\n soap_body = self.to_xml\n soap_message = Part.new do\n content_type 'text/xml; charset=utf-8'\n add_content_transfer_encoding\n body soap_body\n end\n soap_message.add_content_id \"<savon_soap_xml_part>\"\n @request_message.add_part(soap_message)\n @parts.each do |part|\n @request_message.add_part(part)\n end\n #puts @request_message\n @request_message\n end",
"def to_xml\n header = build_header\n body = build_body\n envelope = build_envelope\n envelope << header\n envelope << body\n doc = Ox::Document.new(version: '1.0')\n doc << envelope\n Ox.dump(doc)\n end",
"def to_s\n @document ||= @request.wsdl.body\n end",
"def get_nfg_soap_request_template\n \"<?xml version=\\\"1.0\\\" encoding=\\\"utf-8\\\"?><soap12:Envelope xmlns:xsi=\\\"http://www.w3.org/2001/XMLSchema-instance\\\" xmlns:xsd=\\\"http://www.w3.org/2001/XMLSchema\\\" xmlns:soap12=\\\"http://www.w3.org/2003/05/soap-envelope\\\"><soap12:Body>|body|</soap12:Body></soap12:Envelope>\"\n end",
"def to_s\n\t\t\ts = \"#{headername}\\n\" +\n\t\t\t\t\"{\\n\" +\n\t\t\t\t\"v=#{@version}\\n\" +\n\t\t\t\"hop=#{@hop}\\n\" +\n\t\t\t\"uid=#{@uid}\\n\" +\n\t\t\t\"class=#{@msgclass}\\n\" +\n\t\t\t\"source=#{@src_addr}\\n\"\n\n\t\t\ts << \"target=#{@target_addr}\\n\" if @target_addr\n\n\t\t\tif @headers\n\t\t\t\t@headers.each do |k, v|\n\t\t\t\t\ts << \"#{k}=#{v}\\n\"\n\t\t\t\tend\n\t\t\tend\n\n\t\t\ts << \"}\\n\"\n\n\t\t\tif @blocks\n\t\t\t\t@blocks.each do |name, block|\n\t\t\t\t\ts << \"#{name}\\n{\\n\"\n\t\t\t\t\tblock.each do |k, v|\n\t\t\t\t\t\ts << \"#{k}=#{v}\\n\"\n\t\t\t\t\tend\n\t\t\t\t\ts << \"}\\n\"\n\t\t\t\tend\n\t\t\tend\n\n\t\t\ts\n\t\tend",
"def build(locals={}, &block)\n set_locals(locals, block)\n @locals[:message][:version] = Ebay::Api.schema_version\n @locals.message_tag camelcase(@name.to_s =~ /_request$/ ? @name : \"#{@name}_request\")\n Savon::Builder.new(@name, @wsdl, @globals, @locals)\n end",
"def inspect\n \"#<Envelope::Message to=#{formatted_to} from=#{formatted_from} cc=#{formatted_cc} bcc=#{formatted_bcc} reply_to=#{formatted_reply_to} subject=\\\"#{subject}\\\" text_part=\\\"#{preview = (text_part || html_part); (preview && preview.gsub(/\\s+/, ' ') || 'No preview available')[0..50]}...\\\">\"\n end",
"def echo\n xml = Builder::XmlMarkup.new(:indent => 2)\n xml.instruct!\n xml.notification_echo do\n xml.payment_unique_id @payment_unique_id\n end\n xml.target!\n end",
"def inspect #:nodoc:\n s = \"#<#{self.class}:0x#{(self.object_id*2).to_s(16)} \"\n @header_object.each_pair do |k,v|\n s += \"(#{k.upcase} size=#{v[1]} offset=#{v[2]}) \" unless k == \"ASF_Header_Object\"\n end\n s += \"\\b>\"\n end",
"def buildXML(transform_obj)\n xml_string = '<PoortegoTransformResponse xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n xsi:noNamespaceSchemaLocation=\"' + \"#{ENV['POORTEGO_LOCAL_BASE']}/poortego/lib/core/poortego_transform/xml/PoortegoTransformResponse.xsd\" + '\">'\n \n xml_string << '<ResponseData>'\n ## Entities\n xml_string << '<Entities>'\n transform_obj.responseEntities.each do |responseEntity|\n xml_string << '<Entity'\n responseEntity.attributes.each do |key,value|\n xml_string << \" #{key}='#{value}'\"\n end\n xml_string << '>'\n responseEntity.additionalFields.each do |name,value|\n xml_string << \"<AdditionalField name='#{name}' value='#{value}' />\"\n end\n xml_string << '</Entity>'\n end\n xml_string << '</Entities>'\n \n ## Links\n xml_string << '<Links>'\n transform_obj.responseLinks.each do |responseLink|\n xml_string << '<Link'\n responseLink.attributes.each do |key,value|\n xml_string << \" #{key}='#{value}'\"\n end\n xml_string << '>'\n responseLink.additionalFields.each do |name,value|\n xml_string << \"<AdditionalField name='#{name}' value='#{value}' />\"\n end\n xml_string << '</Link>'\n end\n xml_string << '</Links>'\n xml_string << '</ResponseData>'\n \n ## Messages\n xml_string << '<ResponseMessages>'\n transform_obj.responseMessages.each do |responseMessage|\n xml_string << \"<Message \"\n xml_string << \"title='#{responseMessage.title}' \"\n xml_string << \"type='#{responseMessage.type}'>\"\n xml_string << \"#{responseMessage.body}\"\n xml_string << '</Message>'\n end\n xml_string << '</ResponseMessages>'\n xml_string << '</PoortegoTransformResponse>'\n @xml_response = xml_string\n return @xml_response\n end",
"def to_envelope\n \"ENVELOPE(#{west}, #{east}, #{north}, #{south})\"\n end",
"def emit_model_class!( file, model, &block )\n inheritance = if request_models.include?(model)\n \" : #{ base_api_namespace() }.RequestMessage\"\n elsif response_models.include?(model)\n \" : #{ base_api_namespace() }.ResponseMessage\"\n else\n \"\"\n end\n \n\n file << \"#{indent}[DataContract]\\n\"\n file << \"#{indent}public partial class #{ sanitize_model_class(model) }#{ inheritance } {\\n\\n\"\n \n @indent += 1\n yield\n @indent -= 1\n\n file << \"#{indent}}\"\n end",
"def to_s\n context = @solution.map{|k, v| \"#{k}: #{v}\"}.join(',')\n \"#<Twilio.Messaging.V1.ServiceContext #{context}>\"\n end",
"def generate_simple(opts, &block)\n Msf::Simple::Payload.generate_simple(self, opts, &block)\n end",
"def to_s\n \"#{@customer}: #{@body}\"\n end",
"def _render_soap(result, options)\n @namespace = NAMESPACE\n @operation = soap_action = request.env['wash_out.soap_action']\n action_spec = self.class.soap_actions[soap_action][:out].clone\n result = { 'value' => result } unless result.is_a? Hash\n result = HashWithIndifferentAccess.new(result)\n inject = lambda {|data, spec|\n spec.each do |param|\n if param.struct?\n inject.call(data[param.name], param.map)\n else\n param.value = data[param.name]\n end\n end\n }\n\n soap_response = render_to_string :template => 'wash_with_soap/response',\n :locals => { :result => inject.call(result, action_spec) }\n\n if options[:ws_security] == \"encrypt\" || options[:ws_security] == \"sign\" || options[:ws_security] == \"sign_encrypt\"\n soap_response = ws_security_apply(soap_response, options)\n end\n \n\n\n if is_exception?(soap_response)\n Rails.logger.error \"PHP_SCRIPT_ERROR #{ws_security_response}\"\n render :template => 'wash_with_soap/error', :status => 500,\n :locals => { :error_message => \"php_script_error\" }\n else\n render :xml => soap_response\n end\n end",
"def build\r\n builder = Builder::XmlMarkup.new\r\n builder.instruct!(:xml, encoding: 'UTF-8')\r\n builder.tag! :env, :Envelope, namespaces do |env|\r\n env.tag!(:env, :Header) do |env_header|\r\n create_header(env_header)\r\n end\r\n env.tag!(:env, :Body) do |env_body|\r\n create_body(env_body)\r\n end\r\n end\r\n end",
"def inspect\n str = +\"#<#{self.class.name}:0x#{object_id}\"\n str << \" id=\\\"#{@id}\\\"\" if @id\n str << '>'\n end",
"def to_s\n self.header + ' ' + @body.join(\"\\n \") + self.footer\n end",
"def to_s\n \"#<Envelope::Message to=#{formatted_to} from=#{formatted_from} subject=\\\"#{subject}\\\">\"\n end",
"def inspect\n size = length\n\n content = if size > 20\n \"start=#{self[0...10].to_s.inspect} end=#{self[-10..-1].to_s.inspect}\"\n else\n \"content=#{super}\"\n end\n\n \"#<#{self.class}:0x#{\"%x\" % object_id} bytes=#{size} #{content}>\"\n end",
"def build(x, envelope)\n x.complex(nil, ['http://schemas.xmlsoap.org/soap/envelope/', 'Envelope'], []) do |x|\n build_header(x, envelope.header)\n build_body(x, envelope.body)\n end\n end",
"def inspect\n context = @solution.map{|k, v| \"#{k}: #{v}\"}.join(',')\n \"#<Twilio.Messaging.V1.ServiceContext #{context}>\"\n end",
"def build_request(body, options)\n requires!(options, :xml_transaction_wrapper)\n xml = Builder::XmlMarkup.new :indent => 2\n xml.instruct!(:xml, :version => '1.0', :encoding => 'utf-8')\n xml.tag! 'soap:Envelope', { 'xmlns:soap' => 'http://schemas.xmlsoap.org/soap/envelope/', \n 'xmlns:xsi' => 'http://www.w3.org/2001/XMLSchema-instance', \n 'xmlns:xsd' => 'http://www.w3.org/2001/XMLSchema'} do\n xml.tag! 'soap:Body' do\n xml.tag! options[:xml_transaction_wrapper], {'xmlns' => \"https://www.thepaymentgateway.net/\"} do\n unless options[:xml_transaction_wrapper] == \"ThreeDSecureAuthentication\"\n xml.tag! 'PaymentMessage' do\n add_merchant_data(xml, options)\n xml << body\n end\n else\n xml << body\n end\n end\n end\n end\n xml.target! \n end",
"def to_s\n return self.xml_header + self.xml_docs.join + self.xml_footer\n end",
"def generate\n header + dict + footer\n end",
"def build_soap_message(params)\n Hash[params.map { |k, v| [\"v2:#{k}\", v] }]\n end",
"def inspect\n \"#<#{self.class.name}:#{self.object_id} name='#{name}' service_url='#{self.service_url}'>\"\n end",
"def inspect\n \"#<#{self.class.name}:#{self.object_id} name='#{name}' service_url='#{self.service_url}'>\"\n end",
"def inspect\n \"#<#{self.class.name}:#{self.object_id} name='#{name}' service_url='#{self.service_url}'>\"\n end",
"def to_s\n \"#<#{self.class.name}:#{object_id} #{info}>\"\n end",
"def to_xml(options={}, &block)\n super(options.reverse_merge(:include => :parts), &block)\n end",
"def inspect\n vars = options.map { |k, v| \"@#{k}=#{v}\" }.join(\", \")\n \"#<#{self.class}:#{format('0x%014x', object_id)} #{vars}>\"\n end",
"def inspect\n string = \"svc name [#{@service_name}], seqid #{sequence_id.inspect}, \"\n string << \"addr #{@address.inspect}, \"\n string << \"payload #{@payload.inspect}\"\n string\n end",
"def to_s\n text = \"\"\n text << self.header\n @body.each do |section|\n text << section\n end\n text << self.footer\n\n #return\n text\n end",
"def inspect\n \"#<#{self.class.name} @body=\\\"#{self.body}\\\">\"\n end",
"def generate_response_xml\n builder = Builder::XmlMarkup.new\n builder.instruct!\n\n builder.imsx_POXEnvelopeResponse(\"xmlns\" => \"http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0\") do |env|\n env.imsx_POXHeader do |header|\n header.imsx_POXResponseHeaderInfo do |info|\n info.imsx_version \"V1.0\"\n info.imsx_messageIdentifier @message_identifier || IMS::LTI::generate_identifier\n info.imsx_statusInfo do |status|\n status.imsx_codeMajor @code_major\n status.imsx_severity @severity\n status.imsx_description @description\n status.imsx_messageRefIdentifier @message_ref_identifier\n status.imsx_operationRefIdentifier @operation\n end\n end\n end #/header\n env.imsx_POXBody do |body|\n unless unsupported?\n if @operation == OutcomeRequest::READ_REQUEST\n body.tag!(@operation + 'Response') do |request|\n request.result do |res|\n res.resultScore do |res_score|\n res_score.language \"en\" # 'en' represents the format of the number\n res_score.textString @score.to_s\n end\n end #/result\n end\n else\n body.tag!(@operation + 'Response')\n end #/operationResponse\n end\n end #/body\n end\n end",
"def dumpMethod( operation, binding )\n methodName = createMethodName( operation.name.name )\n methodNameAs = operation.name.name\n params = collectParams( operation )\n soapAction = binding.soapOperation.soapAction\n namespace = binding.input.soapBody.namespace\n paramsStr = param2str( params )\n if paramsStr.empty?\n paramsStr = '[]'\n else\n paramsStr = \"[\\n\" << paramsStr << \" ]\"\n end\n return <<__EOD__\n[ #{ dq( methodNameAs ) }, #{ dq( methodName ) }, #{ paramsStr },\n #{ dq( soapAction ) }, #{ dq( namespace ) } ]\n__EOD__\n end",
"def echo\n xml = Builder::XmlMarkup.new(:indent => 2)\n xml.instruct!\n xml.notification_echo do\n xml.unique_id @unique_id\n end\n xml.target!\n end",
"def echo\n xml = Builder::XmlMarkup.new(:indent => 2)\n xml.instruct!\n xml.notification_echo do\n xml.unique_id @unique_id\n end\n xml.target!\n end",
"def _generate_wsdl\n @map = self.class.soap_actions\n @namespace = NAMESPACE\n @name = controller_path.gsub('/', '_')\n\n render :template => 'wash_with_soap/wsdl'\n end",
"def xml\n @xml ||= begin\n builder = ::Builder::XmlMarkup.new(:indent => 4)\n\n authed_xml_as_string(builder) do\n builder.GetReceiptInfoCall do\n builder.ReceiptFilter do\n builder.ReceiptId(id)\n end\n end\n end\n end\n end",
"def builder(opts = {})\n raw_builder(opts) do |xml|\n xml.instruct! :xml, :version => '1.0', :encoding => 'UTF-8'\n xml.epp('xmlns' => EPPClient::SCHEMAS_URL['epp'], 'xmlns:epp' => EPPClient::SCHEMAS_URL['epp']) do\n yield xml\n end\n end\n end",
"def generate\n result = <<-eos.strip_heredoc\n framework module #{target.product_module_name} {\n umbrella header \"#{target.umbrella_header_path.basename}\"\n\n export *\n module * { export * }\n eos\n\n result << \"\\n#{generate_private_header_exports}\" unless private_headers.empty?\n result << \"}\\n\"\n end",
"def inspect\n attributes = [\n \"name=#{name.inspect}\",\n \"state=#{state.inspect}\",\n \"description=#{description.inspect}\",\n \"adapter=#{adapter.name.inspect}\",\n ]\n \"#<#{self.class.name}:#{object_id} #{attributes.join(', ')}>\"\n end",
"def to_ofx\r\n buf = \"\"\r\n # Set margin=6 to indent it nicely within the output from Transaction.to_ofx\r\n x = Builder::XmlMarkup.new(:target => buf, :indent => 2, :margin=>6)\r\n x.PAYEE {\r\n x.NAME name\r\n x.ADDR1 address\r\n x.CITY city\r\n x.STATE state\r\n x.POSTALCODE postalcode\r\n x.COUNTRY country unless country.nil? # minOccurs=\"0\" in schema (above)\r\n x.PHONE phone\r\n }\r\n return buf\r\n end",
"def to_s\n [\n +\"#<#{self.class}\",\n +\" id=#{id}\",\n +\" uri=#{uri}\",\n +\" endpoints=#{endpoints_string}\",\n +\">\",\n ].join(\"\")\n end",
"def to_s\n context = @solution.map{|k, v| \"#{k}: #{v}\"}.join(',')\n \"#<Twilio.Proxy.V1.ServiceContext #{context}>\"\n end",
"def request_xml(opts)\n envelope_ns_key = \"#{namespace_key(:envelope)}\"\n builder = Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|\n xml[envelope_ns_key].Envelope(namespace_hash) {\n xml = header_xml(xml, opts[:wsa])\n xml = body_xml(xml, opts[:message], opts[:params], opts[:extra])\n }\n end\n end",
"def inspect\n attributes = [\n \"feature_name=#{feature_name.inspect}\",\n \"gate_key=#{gate_key.inspect}\",\n ]\n \"#<#{self.class.name}:#{object_id} #{attributes.join(', ')}>\"\n end",
"def envelope\n \"ENVELOPE(#{west}, #{east}, #{north}, #{south})\"\n end",
"def put_xml(obj = nil, status = 200, append_response = false)\n @performed_render = true\n \n obj.set_pretty_print(true)\n text = obj.to_xml\n \n response.content_type = 'text/xml'\n response.status = status\n \n if append_response\n response.body ||= ''\n response.body << text.to_s\n else\n response.body = case text\n when Proc then text\n when nil then \" \" # Safari doesn't pass the headers of the return if the response is zero length\n else \n text.to_s\n end\n end\n end",
"def inspect\n fields = serializable_hash.map { |k, v| \"#{k}=#{v}\" }\n \"#<#{self.class.name}:#{object_id} #{fields.join(' ')}>\"\n end",
"def aim_xml(obj, fields, extra_data = {}, name = obj.class.to_s)\r\n # \tfields\r\n # to_hash(obj, fields)\r\n # to_hash(obj, fields).merge(extra_data)\r\n # Builder::XmlMarkup.new.tag! name, to_hash(obj, fields).merge(extra_data)\r\n y = to_hash(obj, fields)\r\n b = Builder::XmlMarkup.new.tag!(obj.class.to_s, y.merge(extra_data))\r\n end",
"def to_xml\n @object.marshal_dump.to_xml(:root => :response)\n end",
"def to_xml\n builder.target!\n end",
"def to_s\n '<Twilio.Messaging.V1.ServicePage>'\n end",
"def to_s\n \"#<#{self.class.name}:0x#{object_id.to_s(16).rjust(14, \"0\")} host='#{client.host}'>\"\n end",
"def inspect\n values = @properties.map{|k, v| \"#{k}: #{v}\"}.join(\" \")\n \"<Twilio.Api.V2010.UserDefinedMessageSubscriptionInstance #{values}>\"\n end",
"def render\n content = MessageBuilder.build do |msg|\n return_tag = tags.find do |tag|\n tag.tag_name == \"return\"\n end\n\n attr_kind = if reader? && writer?\n \"attr_accessor\"\n elsif reader? && !writer?\n \"attr_reader\"\n elsif writer? && !reader?\n \"attr_writer\"\n end\n\n msg.bold do\n msg.write path\n if return_tag\n types_signature = return_tag.types.join(\", \")\n msg.write(\" \\u{279c} (#{types_signature})\")\n end\n end\n\n msg.space\n\n msg.inline_code_block do\n msg.write(\"[#{type}, #{visibility}\")\n msg.write(\", #{attr_kind}\") if attr_kind\n msg.write(\", alias: #{name}\") if @alias\n msg.write(\"]\")\n end\n\n msg.newline\n\n if docstring.empty?\n if return_tag\n msg.write(return_tag.text.capitalize)\n else\n msg.italics do\n msg.write(\"No documentation available..\")\n end\n end\n else\n msg.write(docstring)\n end\n\n if signature\n msg.code_block do\n msg.write(signature)\n end\n end\n end\n\n Reply.new(content, embed)\n end",
"def inspect\n result = +\"#<#{self.class.name}:#{object_id}\"\n result << ' @vertices=['\n list = vertices.map { |v| \"#<#{v.selfie}>\" }\n result << list.join(', ')\n result << '] '\n edges = []\n vertices.each do |v|\n edges << v.edges do |e|\n result << \"#{v.object_id} #{e.inspect}\"\n end\n end\n result << \"edges=[#{edges.join(\",\\n \")}]>\"\n result\n end",
"def full_content\n result = <<END\n Message from: #{@name}\n\n Phone: #{@phone}\n Email: #{@email}\n %=====================================%\n\n #{self.content}\nEND\n result\n end",
"def inspect\n string = \"svc name [#{@service_name}], seqid #{sequence_id.inspect}, \"\n string << \"addr #{@address.inspect}, \"\n @frames.each_with_index { |frame, index| string << \"[#{index}]#{frame.copy_out_string}, \" }\n string\n end",
"def to_s\n\t\t\tsprintf(\"#<%s:0x%x size=%d>\", self.class.name, object_id, @size)\n\t\tend",
"def inspect\n context = @solution.map{|k, v| \"#{k}: #{v}\"}.join(',')\n \"#<Twilio.Api.V2010.UserDefinedMessageSubscriptionContext #{context}>\"\n end",
"def to_s\n values = @params.map{|k, v| \"#{k}: #{v}\"}.join(\" \")\n \"<Twilio.Messaging.V1.ServiceInstance #{values}>\"\n end",
"def to_s\n # Update length of the stream.\n @content[pn(:Length)] = @stream.length\n # Convert to String.\n out = []\n out << \"#{@element_id} #{@revision} obj\"\n out << @content.to_s\n out << @stream.to_s\n out << \"endobj\"\n return out.join(LF)\n end",
"def build_xml( id, price, options )\n xml = ::Builder::XmlMarkup.new\n xml.GenerateRequest do\n xml.PxPayUserId ::Pxpay::Base.pxpay_user_id\n xml.PxPayKey ::Pxpay::Base.pxpay_key\n xml.AmountInput sprintf(\"%.2f\", price)\n xml.TxnId id\n xml.TxnType options[:txn_type] ? options[:txn_type].to_s.capitalize : \"Purchase\"\n xml.CurrencyInput options[:currency_input] || \"NZD\"\n xml.MerchantReference options[:merchant_reference] || options[:reference] || id.to_s ## Backwards compatibility\n xml.UrlSuccess options[:url_success] || ::Pxpay::Base.url_success \n xml.UrlFail options[:url_failure] || ::Pxpay::Base.url_failure\n xml.EmailAddress options[:email_address] if options[:email_address]\n xml.TxnData1 options[:txn_data1] if options[:txn_data1]\n xml.TxnData2 options[:txn_data2] if options[:txn_data2]\n xml.TxnData3 options[:txn_data3] if options[:txn_data3]\n xml.TxnData4 options[:txn_data4] if options[:txn_data4]\n xml.Opt options[:opt] if options[:opt]\n xml.EnableAddBillCard 1 if options[:token_billing]\n xml.BillingId options[:billing_id] if options[:token_billing]\n end\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def to_s\n pad_field :body\n recalc_block_len\n super\n end",
"def inspect\n attributes = [\n \"feature=#{feature.name.inspect}\",\n \"description=#{description.inspect}\",\n \"adapter=#{adapter.name.inspect}\",\n \"adapter_key=#{adapter_key.inspect}\",\n \"toggle_class=#{toggle_class.inspect}\",\n \"toggle_value=#{toggle.value.inspect}\",\n ]\n \"#<#{self.class.name}:#{object_id} #{attributes.join(', ')}>\"\n end",
"def builder\n @builder ||= Builder::XmlMarkup.new(:indent => 2)\n end",
"def to_xml(options = {}) \n\t\t\t@xml = \"\"\n\t\t\t@builder = Builder.new( :target => @xml, :indent => 4 )\n\t\t\t@builder.access_request!(self) unless !options[:access_request].nil? && !options[:access_request]\n\t\t\t@builder.instruct! unless !options[:instruct].nil? && !options[:instruct]\n\t\t\t\n\t\t\t# load this class's YAML file\n\t\t\tmap = YAML.load_file(\"#{LABEL_SERVICE_ROOT}/methods/#{self.request_action.underscore}.yml\")\n\n\t\t\t# this is in order to provide default and other values\n\t\t\tto_v = lambda do |value|\n\t\t\t\treturn value.gsub(\"'\",\"\") if value =~ /^'(.*)'$/ # it's string literal\n\t\t\t\tm = value.to_sym\n\t\t\t\treturn self.send(m) if self.respond_to?(m) && !self.send(m).nil? # it's an instance method\n\t\t\t\treturn self.class.config.send(m) if self.class.config.respond_to?(m) && !self.class.config.send(m).nil? # it's a config method\n\t\t\t\treturn nil\n\t\t\tend\n\n\t\t\t# recurse!\n\t\t\tto_x = lambda do |key, value| \n\t\t\t\tel = key.camelize.to_sym\n next if el == :ReturnService and to_v.call(:return_service).nil?\n\t\t\t\tcase value\n\t\t\t\t\twhen String then @builder.__send__(el, to_v.call(value))\n\t\t\t\t\twhen Hash then @builder.__send__(el){|b| \n value.each(&to_x)\n }\n\t\t\t\t\twhen Array then @builder = self.send(\"#{key}_array\", @builder)\n\t\t\t\tend\n\t\t\tend\n\n\t\t\t# loop through map and build xml\n\t\t\tmap.each(&to_x) \n\t\t\t@xml\n\t\tend",
"def to_s\n \"#<#{self.class.name}:#{object_id}> @names=#{names}>\"\n end",
"def inspect\n values = @properties.map{|k, v| \"#{k}: #{v}\"}.join(\" \")\n \"<Twilio.Messaging.V1.ServiceInstance #{values}>\"\n end",
"def generate_string(options = {})\n string = ''\n\n string << \"# -----BEGIN SECTION #{name}-----\\n\"\n\n @elements.each do |element|\n string << element.to_s(options)\n end\n\n string << \"# -----END SECTION #{name}-----\\n\"\n\n string\n end",
"def to_s\n Semantic.check_class(self) do\n str = \"function #{name}(#{@parameters.join(', ')}) {\\n\"\n Indentation.indent do\n str << \"#{@attributes.map(&:to_s).join(\"\\n\")}\\n\" unless\n @attributes.empty?\n str << \"#{Indentation.get}var self = this;\\n\\n\"\n str << \"#{function2method(@constructor)}\"\n unless @private_methods.empty?\n str << \"\\n#{@private_methods.map(&:to_s).join(\"\\n\")}\"\n end\n unless @public_methods.empty?\n str << \"\\n\"\n str << @public_methods.map { |m| function2method(m) }.join(\"\\n\")\n end\n str << \"\\n#{Indentation.get}this.init(#{@parameters.join(', ')});\\n\"\n end\n \"#{str}#{Indentation.get}}\\n\"\n end\n end",
"def inspect\n return 'nil' if @obj.nil?\n\n \"#<#{self.class.name}:#{fullName}>\"\n end",
"def to_s\n \"#{self.class.to_s}::<#{object_id}> Args: #{args}\"\n end",
"def message_template; end",
"def inspect\n obj_id = \"%x\" % (object_id << 1)\n \"#<#{self.class}:0x#{obj_id} @jid=\\\"#{@jid}\\\" @name=#{@name.inspect}>\"\n end",
"def to_s\n \"------\\nA new purchase has been made:\\nTransaction id: #{@id}, customer: #{@customer.name}, product: #{@product.title}\\n------\"\n end",
"def inspect\n context = @solution.map{|k, v| \"#{k}: #{v}\"}.join(',')\n \"#<Twilio.Proxy.V1.ServiceContext #{context}>\"\n end",
"def inspect\n attributes = [\n \"name=#{name.inspect}\",\n \"key=#{key.inspect}\",\n \"data_type=#{data_type.inspect}\",\n ]\n \"#<#{self.class.name}:#{object_id} #{attributes.join(', ')}>\"\n end",
"def to_s\n headers = \"\"\n headers = YAML::dump(@headers).sub(/^--- \\n/, '') unless @headers.empty?\n extended = \"\"\n extended = \"\\n<!--more-->\\n#{@raw_extended}\" if @raw_extended\n \"#{headers}\\n#{@raw_body}#{extended}\"\n end",
"def mgs_hops_order_confirm_message\n $tracer.trace(__method__)\n return ToolTag.new(div.className(\"order_confirm_text\"), __method__, self)\n\tend",
"def to_s\n context = @solution.map{|k, v| \"#{k}: #{v}\"}.join(',')\n \"#<Twilio.Api.V2010.UserDefinedMessageSubscriptionContext #{context}>\"\n end",
"def to_xml\n xml_builder = Nokogiri::XML::Builder.new do |xml|\n xml.Package('xmlns' => 'http://soap.sforce.com/2006/04/metadata') {\n self.each do |key, members|\n xml.types {\n members.each do |member|\n xml.members member\n end\n xml.name key.to_s.camelize\n }\n end\n xml.version Metaforce.configuration.api_version\n }\n end\n xml_builder.to_xml\n end",
"def build_result(action)\n xml = ''\n doc = Builder::XmlMarkup.new(:target => xml)\n doc.tag!(\"#{action}Response\", :xmlns => \"http://sdb.amazonaws.com/doc/2007-11-07\") do\n if block_given?\n yield doc\n end\n \n doc.ResponseMetadata do\n doc.RequestId \"1234\"\n doc.BoxUsage \"0\"\n end\n end\n \n xml\n end",
"def to_s\n values = @params.map{|k, v| \"#{k}: #{v}\"}.join(\" \")\n \"<Twilio.Api.V2010.UserDefinedMessageSubscriptionInstance #{values}>\"\n end",
"def generate(obj)\n factory = obj.factory\n if @type_format == :ewkb || @type_format == :wkb12\n @cur_has_z = factory.property(:has_z_coordinate)\n @cur_has_m = factory.property(:has_m_coordinate)\n else\n @cur_has_z = nil\n @cur_has_m = nil\n end\n @cur_dims = 2 + (@cur_has_z ? 1 : 0) + (@cur_has_m ? 1 : 0)\n start_emitter\n generate_feature(obj, true)\n finish_emitter\n end",
"def request(options = {}, &body)\n soap_xml = request_builder.render(\n body: options.delete(:body) || body,\n header: options.delete(:header),\n )\n options[:body] = soap_xml\n soap_request options\n end",
"def build_request(body, options)\n xsd_version = test? ? TEST_XSD_VERSION : PRODUCTION_XSD_VERSION\n\n xml = Builder::XmlMarkup.new indent: 2\n xml.instruct!\n xml.tag! 's:Envelope', { 'xmlns:s' => 'http://schemas.xmlsoap.org/soap/envelope/' } do\n xml.tag! 's:Header' do\n xml.tag! 'wsse:Security', { 's:mustUnderstand' => '1', 'xmlns:wsse' => 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd' } do\n xml.tag! 'wsse:UsernameToken' do\n xml.tag! 'wsse:Username', @options[:login]\n xml.tag! 'wsse:Password', @options[:password], 'Type' => 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText'\n end\n end\n end\n xml.tag! 's:Body', { 'xmlns:xsi' => 'http://www.w3.org/2001/XMLSchema-instance', 'xmlns:xsd' => 'http://www.w3.org/2001/XMLSchema' } do\n xml.tag! 'requestMessage', { 'xmlns' => \"urn:schemas-cybersource-com:transaction-data-#{xsd_version}\" } do\n add_merchant_data(xml, options)\n xml << body\n end\n end\n end\n xml.target!\n end",
"def build_body(action , data)\n\t\t@name_space = 'a'\n\t\tbody = {\n\t\t\t'soap:Envelope' => {\n\t\t\t\t'xmlns:soap' => 'http://schemas.xmlsoap.org/soap/envelope/',\n\t\t\t\t'xmlns:xsi' => 'http://www.w3.org/2001/XMLSchema-instance',\n\t\t\t\t'xmlns:xsd' => 'http://www.w3.org/2001/XMLSchema',\n\t\t\t\t'soap:Body' => {\n\t\t\t\t\t\"#{action.name}\" => {\n\t\t\t\t\t\t\"xmlns\" => WsdlParser.target_namespace\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif(!data.nil?)\n\t\t\tdata.each do |key, value|\n\t\t\t\tbuild_param(body['soap:Envelope']['soap:Body'][\"#{action.name}\"],action,key,value)\n\t\t\tend\n\t\tend\n\n\t\tXmlSimple.xml_out(body, 'RootName' => nil)\n\tend",
"def to_xml\n Builder.new(self).to_s\n end",
"def make_booking(property_id, arrival_date, departure_date, guest)\n message = builder.new(encoding: 'utf-8') do |xml|\n xml.root do\n xml.APIUsername credentials.username\n xml.APIPassword credentials.password\n xml.BD do\n xml.ArrivalDate arrival_date\n xml.DepartureDate departure_date\n xml.PropertyID property_id\n guest.to_xml(xml)\n xml.PoolHeatRequired false\n xml.xmlMsg\n xml.jSonMsg\n end\n end\n end\n message.doc.root.children.to_xml\n end",
"def inspect\n\t\treturn \"#<%p:%#0x %s (%s) ack: %s, routing: %p, prefetch: %d>\" % [\n\t\t\tself.class,\n\t\t\tself.object_id * 2,\n\t\t\tself.name,\n\t\t\tself.consumer_tag,\n\t\t\tself.acknowledge ? \"yes\" : \"no\",\n\t\t\tself.routing_keys,\n\t\t\tself.prefetch,\n\t\t]\n\tend"
] | [
"0.58817387",
"0.57534456",
"0.57176",
"0.5630687",
"0.5561366",
"0.55007046",
"0.5497265",
"0.5489613",
"0.5456299",
"0.54435",
"0.5423764",
"0.5360477",
"0.53596145",
"0.5348982",
"0.5336954",
"0.53322047",
"0.5304959",
"0.53029126",
"0.52865994",
"0.5283085",
"0.52460164",
"0.52287084",
"0.52205855",
"0.5205446",
"0.5205168",
"0.5196607",
"0.5190936",
"0.51883614",
"0.51777905",
"0.51777905",
"0.51777905",
"0.51564354",
"0.5146312",
"0.5140648",
"0.51394635",
"0.51313716",
"0.51269805",
"0.5115224",
"0.5114055",
"0.5111821",
"0.5111821",
"0.50995255",
"0.5089822",
"0.5074437",
"0.50676805",
"0.5062599",
"0.50412875",
"0.5033426",
"0.50321704",
"0.5026157",
"0.5018094",
"0.50092745",
"0.5003431",
"0.4998807",
"0.4998752",
"0.49926412",
"0.4989753",
"0.49896693",
"0.49853218",
"0.4954052",
"0.49409363",
"0.4937799",
"0.49336317",
"0.4927922",
"0.49264017",
"0.4921705",
"0.49138865",
"0.49034637",
"0.48962647",
"0.48855373",
"0.48855373",
"0.48855373",
"0.48855373",
"0.4873014",
"0.48722795",
"0.48714402",
"0.4861178",
"0.48573884",
"0.48556158",
"0.48532602",
"0.4848992",
"0.48413333",
"0.48405683",
"0.4834904",
"0.48331147",
"0.48314577",
"0.4827425",
"0.48232678",
"0.48218557",
"0.48206976",
"0.48190293",
"0.4814987",
"0.4804414",
"0.48020035",
"0.48009288",
"0.4798028",
"0.47914055",
"0.47889167",
"0.47870126",
"0.47869876",
"0.47841552"
] | 0.0 | -1 |
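The negatives above all assemble XML or SOAP payloads with Builder or Nokogiri builders. As a minimal sketch of that pattern (tag names, namespace, and values are illustrative assumptions, not taken from any snippet above):

    require 'builder'

    # Build a small XML document in memory; target! returns the generated string.
    xml = Builder::XmlMarkup.new(indent: 2)
    xml.instruct!
    xml.package(xmlns: 'http://example.com/schema') do
      xml.member 'ClassOne'
      xml.member 'ClassTwo'
      xml.version '52.0'
    end
    puts xml.target!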
Overriding Devise's built-in active_for_authentication? method | def active_for_authentication?
super and not self.deleted?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def active_for_authentication?\n super\n end",
"def active_for_authentication?\n super && active\n end",
"def active_for_authentication?\n active? && super\n end",
"def active_for_authentication?\n super && self.active? # i.e. super && self.active\n end",
"def active_for_authentication?\n super && active?\n end",
"def active_for_authentication?\n super && active?\n end",
"def active_for_authentication?\n super && active?\n end",
"def active_for_authentication?\n super && self.is_active?\n end",
"def active_for_authentication?\n super && is_active?\n end",
"def active_for_authentication?\n super && isActive\n end",
"def active_for_authentication?\n self.active?\n end",
"def active_for_authentication?\n super and self.enabled?\n end",
"def active_for_authentication?; end",
"def active_for_authentication?; end",
"def active_for_authentication?\n\t\tsuper && (self.is_active)\n\tend",
"def active_for_authentication?\r\n super && active_status?\r\n end",
"def active_for_authentication?\n super && !self.deactivated # i.e. super && self.is_active\n end",
"def active_for_authentication?\n super && assigned?\n end",
"def active_for_authentication?\n super && self.enabled? && self.is_allowed_in?\n end",
"def active_for_authentication?\n super && account_active?\n end",
"def active_for_authentication?\n super and self.disponible?\n end",
"def active_for_authentication?\n super && activation_status == true\nend",
"def active_for_authentication?\n #remember to call the super\n #then put our own check to determine \"active\" state using\n #our own \"is_active\" column\n super and self.is_active?\n end",
"def active_for_authentication?\n #remember to call the super\n #then put our own check to determine \"active\" state using\n #our own \"is_active\" column\n super and self.is_active?\n end",
"def active_for_authentication?\n #remember to call the super\n #then put our own check to determine \"active\" state using\n #our own \"is_active\" column\n super and self.is_active?\n end",
"def active_for_authentication?\n #remember to call the super\n #then put our own check to determine \"active\" state using \n #our own \"is_active\" column\n super and self.is_active?\n end",
"def active_for_authentication?\n #remember to call the super\n #then put our own check to determine \"active\" state using \n #our own \"is_active\" column\n super and self.is_active?\n end",
"def active_for_authentication?\n super and !self.disabled?\n end",
"def active_for_authentication?\n super && !disabled?\n end",
"def active_for_authentication?\n !deactivated? && super\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && !disabled\n end",
"def active_for_authentication? \n super && approved? \n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication?\n super && approved?\n end",
"def active_for_authentication? \n \tsuper && approved?\n \t\n\tend",
"def active_for_authentication?\n super && !self.is_disabled\n end",
"def active_for_authentication?\n super && !disabled?\n end",
"def active_for_authentication?\n super && !banned?\n end",
"def active_for_authentication?\n super && un_blocked?\n end",
"def active_for_authentication?\n super && self.approved?\n end",
"def active_for_authentication?\n login_email = multi_email.login_email_record\n\n if login_email && !login_email.primary?\n super && login_email.active_for_authentication?\n else\n super\n end\n end",
"def active_for_authentication?\n super && !banned\n end",
"def active_for_authentication? \n super && (self.is_disabled == 0)\n end",
"def active_for_authentication? \n super && is_approved? \n end",
"def active_for_authentication?\n super and self.locked != 1\n end",
"def active_for_authentication?\n # logger.debug self.to_yaml\n super && account_active?\n end",
"def active_for_authentication?\n super && !self.blocked\n end",
"def active_for_authentication?\n super && !self.blocked\n end",
"def active_for_authentication? \n super && (approved? || self.admin) \n end",
"def active_for_authentication?\n super && !expired?\n end",
"def resource_active_for_authentication?\n !@resource.respond_to?(:active_for_authentication?) || @resource.active_for_authentication?\n end",
"def active_for_authentication?\n super && !deactivated_at\n end",
"def active_for_authentication?\n super && !deactivated_at\n end",
"def active_for_authentication?\n super && !deactivated_at\n end",
"def active_for_authentication?\n super && !self.deactivated_staff?\n end",
"def active_for_authentication?\n super && !access_locked?\n end",
"def active_for_authentication?\n super && !disabled_at\n end",
"def check_authentication\n authenticate_user\n end",
"def authenticate_active_admin_user\n send(active_admin_namespace.authentication_method) if active_admin_namespace.authentication_method\n end",
"def active_for_authentication?\n super && !suspended_account\n end",
"def active_for_authentication?\n super && !deleted? && approved?\n end",
"def active_for_authentication?\n super && !deleted? && approved?\n end",
"def active_for_authentication?\n super && bloqueado == 0\n end",
"def authenticate_active_admin_user\n send(active_admin_namespace.authentication_method) if active_admin_namespace.authentication_method\n end",
"def active_for_authentication?\n super && !deleted?\n end",
"def active_for_authentication?\n super && !deleted?\n end",
"def active_for_authentication?\n super && !deleted?\n end",
"def valid_for_authentication?; end",
"def valid_for_authentication?; end",
"def active_for_authentication?\n super && !deleted_at && self.enabled?\n end",
"def authenticate_user?\n !current_user.nil?\n end",
"def authenticate?\n @authentication_required\n end",
"def active_for_authentication? \n super && !is_deleted \n end",
"def user_authentication!\n if params[:auth_token].present? or params[:token_authentication]== \"true\"\n #this is for api authentication and access\n authenticate_user_from_token!\n else\n #normal devise authentication\n authenticate_user!\n end\n end",
"def active_for_authentication?\n super && !delete_flag?\n end",
"def authenticate_active_admin_user\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n send(active_admin_namespace.authentication_method) if active_admin_namespace.authentication_method\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n end",
"def application_authenticated?\n !!application_authentication\n end",
"def active_for_authentication?\n super && !delete_flag\n end",
"def active_for_authentication?\n super && (!confirmation_required? || confirmed? || confirmation_period_valid?)\n end",
"def is_authenticated?\n end",
"def active_for_authentication?\n # User is approved when harp_id is assigned.\n # Approved flag is set programmatically when harp_id is set by an admin.\n super && is_approved?\n end",
"def active_for_authentication?\n \tsuper && !delete_flag\n end",
"def authentication_successful?\n current_user.present?\n end",
"def capable_login_auth?; end",
"def authenticated_user?\n warden.authenticated?\n end",
"def authenticated?(*args)\n warden.authenticated?(*args)\n end",
"def authentication?\n !!@authentication\n end",
"def authenticate\n logged_in? ? true : access_denied\nend",
"def authenticated?\n authentication.authenticated?\n end",
"def user_authenticated?\n basic_authenticated? || token_authenticated?\n end",
"def authenticate\n logged_in? ? true : access_denied\n end",
"def user_authentication\n end",
"def active_for_authentication?\n super and !self.is_archived? #determine \"active\" state using our own \"is_archived\" column\n end",
"def authenticate_user\n Rails.application.config.use_omniauth ? login_required : check_session\n end"
] | [
"0.8767851",
"0.8583711",
"0.8578878",
"0.8572761",
"0.8554177",
"0.8554177",
"0.8554177",
"0.8548365",
"0.8534906",
"0.8447979",
"0.83890665",
"0.8366759",
"0.83224434",
"0.83224434",
"0.82388663",
"0.8175957",
"0.81689036",
"0.81685984",
"0.81514776",
"0.8150547",
"0.80701196",
"0.7996512",
"0.79606074",
"0.79606074",
"0.79606074",
"0.79443187",
"0.79443187",
"0.7871056",
"0.7860318",
"0.7858301",
"0.778663",
"0.7785788",
"0.7773412",
"0.777256",
"0.777256",
"0.777256",
"0.777256",
"0.777256",
"0.777256",
"0.777256",
"0.7693777",
"0.76832676",
"0.7661567",
"0.7615216",
"0.7585028",
"0.75500053",
"0.7530881",
"0.75173396",
"0.7510608",
"0.74930155",
"0.748322",
"0.7478726",
"0.74305165",
"0.74305165",
"0.7424471",
"0.74066246",
"0.7362951",
"0.7350176",
"0.7350176",
"0.7350176",
"0.73491573",
"0.732851",
"0.7328257",
"0.73237586",
"0.7307768",
"0.7219828",
"0.7195371",
"0.7195371",
"0.7178306",
"0.7120536",
"0.7083458",
"0.7083458",
"0.7083458",
"0.7055489",
"0.7055489",
"0.7034055",
"0.703155",
"0.700728",
"0.69687176",
"0.6942684",
"0.69405013",
"0.68842244",
"0.6879869",
"0.6842414",
"0.68422854",
"0.68341005",
"0.6827324",
"0.68265134",
"0.6741974",
"0.6731015",
"0.6720841",
"0.6704927",
"0.66829497",
"0.6682622",
"0.6673192",
"0.6664538",
"0.66436154",
"0.6635826",
"0.6633149",
"0.6629159"
] | 0.6956796 | 79 |
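Devise calls active_for_authentication? on every sign-in attempt, so overrides like the one in the record above are usually paired with inactive_message so the rejected user is told why. A minimal sketch, assuming a Devise model with a deleted_at soft-delete column (the column name and locale key are assumptions, not from the record):

    class User < ApplicationRecord
      devise :database_authenticatable, :validatable

      # Devise checks this on each authentication attempt;
      # treat soft-deleted users as inactive.
      def active_for_authentication?
        super && deleted_at.nil?
      end

      # The symbol is looked up under devise.failure in the locale files.
      def inactive_message
        deleted_at.nil? ? super : :deleted_account
      end
    end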
Alias to be deprecated | def is_provider?
self.provider?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def alias_of; end",
"def aliased_name; end",
"def deprecated_key=(_arg0); end",
"def aliases; end",
"def aliases; end",
"def aliases; end",
"def alias_names; end",
"def is_alias?; end",
"def private; end",
"def deprecated_tag(text); end",
"def alias?\n false\nend",
"def alias_decls; end",
"def extended(*) end",
"def _deprecation\n @link['deprecation']\n end",
"def with_class_alias(x); end",
"def deprecated(msg, src)\n red(msg)\n green(\"Called from \" + src)\n end",
"def wrapper; end",
"def aliases=(_arg0); end",
"def aliases=(_arg0); end",
"def alias_attribute(new_name, old_name)\n module_eval <<-STR, __FILE__, __LINE__ + 1\n def #{new_name}; self.#{old_name}; end # def subject; self.title; end\n def #{new_name}?; self.#{old_name}?; end # def subject?; self.title?; end\n def #{new_name}=(v); self.#{old_name} = v; end # def subject=(v); self.title = v; end\n STR\n end",
"def aliases\n end",
"def deprecate(old, new = T.unsafe(nil), id: T.unsafe(nil), reference: T.unsafe(nil), &block); end",
"def raise_deprecations; end",
"def old_name; end",
"def alias_attribute(new_name, old_name)\n module_eval <<-STR, __FILE__, __LINE__+1\n def #{new_name}; self.#{old_name}; end # def subject; self.title; end\n def #{new_name}?; self.#{old_name}?; end # def subject?; self.title?; end\n def #{new_name}=(v); self.#{old_name} = v; end # def subject=(v); self.title = v; end\n STR\n end",
"def silence_deprecations; end",
"def alias_method(sym1,sym2) end",
"def print_alias(*) end",
"def wrapper(name); end",
"def custom; end",
"def custom; end",
"def aliases\n\n end",
"def use_instead\n obsolete? ? @use_instead : nil\n end",
"def deprecated\n attributes.fetch(:deprecated)\n end",
"def deprecated\n attributes.fetch(:deprecated)\n end",
"def deprecated?\n false\n end",
"def singleton_method_alias( newname, original )\n\t\t\tsingleton_class.__send__( :alias_method, newname, original )\n\t\tend",
"def internal; end",
"def deprecated?\n superseded_by.present?\n end",
"def deprecated\n return unless object.has_tag?(:deprecated)\n\n erb(:deprecated)\nend",
"def rubygems_deprecate(name, replacement=:none)\n class_eval do\n old = \"_deprecated_#{name}\"\n alias_method old, name\n define_method name do |*args, &block|\n klass = self.kind_of? Module\n target = klass ? \"#{self}.\" : \"#{self.class}#\"\n msg = [ \"NOTE: #{target}#{name} is deprecated\",\n replacement == :none ? \" with no replacement\" : \"; use #{replacement} instead\",\n \". It will be removed in Rubygems #{Gem::Deprecate.next_rubygems_major_version}\",\n \"\\n#{target}#{name} called from #{Gem.location_of_caller.join(\":\")}\",\n ]\n warn \"#{msg.join}.\" unless Gem::Deprecate.skip\n send old, *args, &block\n end\n ruby2_keywords name if respond_to?(:ruby2_keywords, true)\n end\n end",
"def extra; end",
"def alias_deprecated_method(deprecated_method, new_method, version_deprecrated = nil, version_to_be_removed = nil)\n eval %{\n def #{deprecated_method}(*args)\n deprecate_method(:#{deprecated_method}, :#{new_method}, \"#{version_deprecrated}\", \"#{version_to_be_removed}\")\n #{new_method}(*args)\n end\n }\n end",
"def define_alias_methods(member_name, options); end",
"def oldmtd\r\n \"old method\"\r\nend",
"def rdl_alias(new_name, old_name)\n klass = self.to_s\n klass = \"Object\" if (klass.is_a? Object) && (klass.to_s == \"main\")\n RDL.aliases[klass] = {} unless RDL.aliases[klass]\n if RDL.aliases[klass][new_name]\n raise RuntimeError,\n \"Tried to alias #{new_name}, already aliased to #{RDL.aliases[klass][new_name]}\"\n end\n RDL.aliases[klass][new_name] = old_name\n\n if self.method_defined? new_name\n RDL::Wrap.wrap(klass, new_name)\n else\n RDL.to_wrap << [klass, old_name]\n end\n nil\n end",
"def original_name; end",
"def hidden_apis=(_arg0); end",
"def explicit; end",
"def silence_deprecations=(_arg0); end",
"def old_method\n \"old improved method\"\nend",
"def raise_deprecations=(_arg0); end",
"def refutal()\n end",
"def deprecate new_survey\n :reserved\n end",
"def original; end",
"def create_alias(new_name, old_name)\n alias_method new_name.to_sym, old_name.to_sym #getter\n alias_method \"#{new_name}=\".to_sym, \"#{old_name}=\".to_sym #setter\n end",
"def macro; raise NotImplementedError; end",
"def macro; raise NotImplementedError; end",
"def macro; raise NotImplementedError; end",
"def private_method\n end",
"def usage; end",
"def usage; end",
"def hardcore_alias(klass, *args)\n \"__#{klass.name}#{args}\"\n end",
"def initialize\n p \"deprecated method, instance should be creat by getNewGroup\"\n end",
"def [](method)\n warn \"#{Kernel.caller.first}: [DEPRECATION] #[#{method.inspect}] is deprecated. Use ##{method} to fetch the value.\"\n send(method.to_sym)\n rescue NoMethodError\n nil\n end",
"def deprecated?\n comment.include?(\"(Deprecated)\")\n end",
"def with_variant(...)\n ActiveSupport::Deprecation.silence do\n super(...)\n end\n end",
"def use(*)\n super\n end",
"def _warn_deprecated_command(cmd)\n warn \"\\n'#{cmd}' is deprecated! Please use 'defEvent' and 'onEvent' commands\"\n warn \"Deprecated commands will be removed in future OMF versions\"\n warn \"Moreover, they may not allow the use of some features in this version\"\n end",
"def as_you_like_it_quote; end",
"def name_safe?; end",
"def specialty; end",
"def overload; end",
"def escaper=(_); end",
"def alias_class_method(new_name, old_name)\n #klass = self.to_s\n metaclass.instance_eval do\n alias_method new_name, old_name\n end\n end",
"def alias_class_method(new_name, old_name)\n #klass = self.to_s\n metaclass.instance_eval do\n alias_method new_name, old_name\n end\n end",
"def oldmtd\n \"old method\"\nend",
"def from_installed_gems(*deprecated); end",
"def dir_alias()\n #This is a stub, used for indexing\n end",
"def deprecated_method_alias(name, aliased_method, removal_in: nil, class_method: false)\r\n validate name => Symbol, aliased_method => Symbol, removal_in => [NilClass, String],\r\n class_method => [TrueClass, FalseClass]\r\n\r\n target = class_method ? self.class : self\r\n class_name = self.name\r\n\r\n unless target.method_defined?(aliased_method)\r\n raise ArgumentError, \"#{class_name} does not have method #{aliased_method} defined\"\r\n end\r\n\r\n delimiter = class_method ? '.' : '#'\r\n\r\n target.define_method(name) do |*args, **kvargs|\r\n alias_name = format('%s%s%s', class_name, delimiter, name)\r\n aliased_method_name = format('%s%s%s', class_name, delimiter, aliased_method)\r\n Deprecation.warn_method(alias_name, removal_in, aliased_method_name)\r\n method(aliased_method).call(*args, **kvargs)\r\n end\r\n end",
"def treat_reserved_as_conflict; end",
"def metaalias(to, from)\n metaclass.instance_eval{alias_method to, from}\n end",
"def alt; end",
"def description\n str = \"alias :#{old_method_name}\"\n\n str << \" as #{new_method_name.inspect}\" if new_method_name\n\n str\n end",
"def disabled_warnings; end",
"def change_deprecation_iff_user_requested\n return nil unless @name.deprecated != (params[:name][:deprecated] == \"true\")\n\n !@name.deprecated\n end",
"def public_method; end",
"def define_name_helpers; end",
"def alias\n super || \"Local Relay\"\n end",
"def deprecated_yaml_erb_backend=(_arg0); end",
"def deprecated_yaml_erb_backend=(_arg0); end",
"def deprecated(obj, message = nil)\n obj_class = obj.is_a?(Class) ? \"#{obj}.\" : \"#{obj.class}#\"\n full_message = \"DEPRECATION WARNING: #{obj_class}#{caller_info[2]} \" +\n \"will be removed in a future version of Sass.#{(\"\\n\" + message) if message}\"\n Sass::Util.sass_warn full_message\n end",
"def real_name; end",
"def warn_deprecation(deprecated_name, *suggest_names)\n logger.warn \"[DEPRECATION] '#{deprecated_name}' is deprecated. Please use '#{suggest_names.join(', ')}' instead.\"\n end",
"def safe_alias_method(new_alias, method_name)\n target_class_eval do\n begin\n alias_method(new_alias, method_name)\n rescue NameError\n nil\n end\n end\n end",
"def viewableDescription\n deprecated_since(2009, 7, :viewable_description)\n viewable_description\n\tend",
"def test_deprecated_aliases\n\t\tassert_equal '#<Ole::Types::Clsid:{00020329-0880-4007-c001-123456789046}>',\n\t\t\t\t\t\t\t\t Ole::Types.load_guid(\"\\x29\\x03\\x02\\x00\\x80\\x08\\x07\\x40\\xc0\\x01\\x12\\x34\\x56\\x78\\x90\\x46\").inspect\n\t\tassert_equal '2006-12-31T19:00:00+00:00', Ole::Types.load_time(\"\\000\\370\\331\\336\\r-\\307\\001\").to_s\n\tend",
"def hidden_apis; end",
"def silly_adjective; end",
"def external; end",
"def specie; end"
] | [
"0.78874815",
"0.7004773",
"0.6925187",
"0.6728935",
"0.6728935",
"0.6728935",
"0.67086947",
"0.66253746",
"0.64813894",
"0.64297664",
"0.6377957",
"0.6330424",
"0.632362",
"0.631878",
"0.6313899",
"0.6311774",
"0.6282888",
"0.6279658",
"0.6279658",
"0.62695014",
"0.6265237",
"0.62597436",
"0.6253863",
"0.6240549",
"0.6215287",
"0.62112606",
"0.6203427",
"0.620232",
"0.61407936",
"0.6118137",
"0.6118137",
"0.6094588",
"0.60762256",
"0.60671586",
"0.60671586",
"0.60663426",
"0.60551775",
"0.6049041",
"0.6016767",
"0.60091394",
"0.59895426",
"0.5972125",
"0.59279525",
"0.5926273",
"0.5921482",
"0.5914952",
"0.5908903",
"0.5901373",
"0.5882205",
"0.5855215",
"0.58472586",
"0.5841551",
"0.58398485",
"0.58370656",
"0.58256614",
"0.58234936",
"0.58173746",
"0.58173746",
"0.58173746",
"0.5810662",
"0.58068806",
"0.58068806",
"0.5802732",
"0.5793171",
"0.57928026",
"0.57894945",
"0.57887197",
"0.57879066",
"0.5771014",
"0.57694227",
"0.5753827",
"0.57492614",
"0.5747728",
"0.57450014",
"0.5739078",
"0.5739078",
"0.57324916",
"0.5722633",
"0.5718328",
"0.57182324",
"0.56991446",
"0.5692409",
"0.56887674",
"0.5678615",
"0.56689185",
"0.5665812",
"0.5640685",
"0.56352335",
"0.56296873",
"0.5616596",
"0.5616596",
"0.56107616",
"0.56059474",
"0.55976",
"0.5593936",
"0.55913085",
"0.5591156",
"0.5590127",
"0.5583071",
"0.5572825",
"0.5570462"
] | 0.0 | -1 |
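The record above keeps is_provider? only as a legacy spelling of provider?. A common way to retire such an alias is to leave it delegating while warning callers; a hedged sketch (class name, attribute, and warning text are illustrative):

    class Account
      attr_accessor :provider

      def provider?
        !!provider
      end

      # Deprecated spelling kept for existing callers; remove once they migrate.
      def is_provider?
        warn '[DEPRECATION] is_provider? is deprecated; use provider? instead.'
        provider?
      end
    end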
All comments created in the last day, or over the weekend if it is Monday. Ex: On Monday, returns tasks created since Friday morning (Time.now - 3.day). Ex: On Tuesday, returns tasks created since Monday morning (Time.now - 1.day). | def digest_posts
# Comment.digest_visible.where( topic_id: self.subscribed_topics.pluck(:id) ).where("created_at > ?", (Time.now.monday? ? Time.now.midnight - 3.day : Time.now.midnight - 1.day))
# Post.digest_visible.where( topic_id: self.subscribed_topics.pluck(:id) ).where("created_at > ?", (Time.now.monday? ? Time.now.midnight - 3.day : Time.now.midnight - 1.day))
Post.current.where(status: 'approved').where("created_at > ?", (Time.now.monday? ? Time.now.midnight - 3.day : Time.now.midnight - 1.day))
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def client_tasks_upcoming\n today = Time.zone.now\n self.find_all {|e| (e.completed.nil? or !e.completed) and (e.complete_by >= today and e.complete_by <= (today + 6.days)) }\n end",
"def deadline\n weekly ? self.created_at.sunday.beginning_of_day : self.created_at.end_of_day\n end",
"def week\n @todoas = Todoa.where(user_id: current_user.id,done: false).where('due BETWEEN ? AND ?', Date.today, Date.today + 7.day)\n end",
"def events_seven_days()\n events = []\n today = Date.today\n for i in 0..6 do\n events += events_by_date(today + i)\n end\n return events\nend",
"def weekly_updates_by_project(since)\n since = Date.new(2000,01,01) if since == :all\n self.weekly_updates.where([\"weekstart > ?\",since]).order(\"project_id, weekstart desc\").group_by {|w| w.project}\n end",
"def cutoff_date\n case date.wday\n when 0\n date + (weeks.to_i * 7)\n when 1\n date + (weeks.to_i * 7) - 1\n when 2\n date + (weeks.to_i * 7) - 2\n when 3\n date + (weeks.to_i * 7) - 3\n when 4\n date + (weeks.to_i * 7) - 4\n when 5\n date + (weeks.to_i * 7) - 5\n when 6\n date + (weeks.to_i * 7) + 1\n end\n end",
"def find_all_pending_publication\n find(:all, :conditions => [\"comments.status = ? AND comments.published_at < ?\", \n 'created', Time.now.utc - 30.days])\n end",
"def end_date\n (created_at - 1.week).sunday - 1.day\n end",
"def upcoming_posts(n_weeks = nil)\n\n query_string = \"user_id = #{id} and (distribution_time >= '#{Time.zone.now.utc.to_s}')\"\n if n_weeks\n utc_cutoff = local_end_of_day(n_weeks * 7).utc\n query_string += \" and (distribution_time <= '#{utc_cutoff.to_s}')\" if n_weeks\n end\n\n posts = Post.joins(\n :distribution => :account\n ).where(query_string).order(\"distribution_time\")\n\n end",
"def of_the_day(is_of_a_day = true)\r\n public.without_practice_debate.scoped(:conditions => {:is_debate_of_day => is_of_a_day}, :limit => 1, :order => 'created_at DESC')\r\n end",
"def get_next_date\n today_wday = Date.today.wday\n wday_s = get_weekday(limit_weekday_start)\n wday_e = get_weekday(limit_weekday_end)\n if(wday_s < wday_e)\n if(today_wday > wday_e or today_wday < wday_s or(today_wday == wday_s and Time.now.hour < limit_time_start) or (today_wday == wday_e and Time.now.hour > limit_time_end))\n need_day = (wday_s + 7 - today_wday)%7\n return Time.parse(limit_time_start.to_s + \":00\", Time.now + need_day.day), true\n else\n need_day = wday_e - today_wday\n return Time.parse(limit_time_end.to_s + \":00\", Time.now + need_day.day), false\n end\n else\n if((wday_e < today_wday and today_wday < wday_s) or (today_wday == wday_s and Time.now.hour < limit_time_start) or (today_wday == wday_e and Time.now.hour > limit_time_end))\n need_day = wday_s - today_wday\n return Time.parse(limit_time_start.to_s + \":00\", Time.now + need_day.day), true\n else\n need_day = (wday_e + 7 - today_wday)%7\n return Time.parse(limit_time_end.to_s + \":00\", Time.now + need_day.day), false\n end\n end\n end",
"def send_on_date(n_weeks)\n today = category.user.today\n send_on_date = today + n_weeks * 7 + (send_on - today.wday % 7)\n send_on_date += 7 if today.wday > send_on\n send_on_date\n end",
"def beginning_of_week; end",
"def beginning_of_week; end",
"def days_since_howl\n days = (Date.today - self.created_at.to_date).to_i\n case days\n when 0\n \"Today\"\n when 1\n \"Yesterday\"\n when 2\n \"2 Days Ago\"\n when 3\n \"3 Days Ago\"\n when 4\n \"4 Days Ago\"\n when 5\n \"5 Days Ago\"\n when 6\n \"6 Days Ago\"\n when 7\n \"Over A Week Ago\"\n else\n \"Long Ago\"\n end\n end",
"def find_someday\n self.all(\n :completed_at => nil, \n :someday => true,\n :order => [:title])\n end",
"def future from=7.days\n schedules.where(:when.gte => from.from_now)\n end",
"def get_dates(arr) \ndayDifference = Date.today - 7.days\narr.select do |d| \nif d.created_at.to_date > dayDifference \n d \n end\nend \nend",
"def date_end # originally date_start\n\tdate = Date.today\n\t(1..7).each do |n|\n\t\tdate = Date.today - n#.days\n\t\tbreak if date.wday == 6 # 0 = Sun, 1 = Mon ... 6 = Sat\n\tend\n\tdate\nend",
"def today\n tod = Time.now.strftime(\"%Y-%m-%d\")\n @tasks.each do |project_name, project_tasks|\n project_tasks.each do |task|\n if task.deadline.to_s == tod\n puts task.description\n end\n end\n end\n end",
"def due_today\n TodosList.new(@todos.filter { |todo| todo.due_today? })\n end",
"def wday() end",
"def upcoming_events\n events.where(\"date > ?\", Time.now)\n end",
"def todays_likes\n dayEnd = DateTime.now.beginning_of_day\n dayStart = dayEnd - 1.day\n\n likes.where(\"likes.created_at > ? and likes.created_at < ?\", dayStart, dayEnd)\n end",
"def overdue\n schedules.where(:when.lte => Time.now)\n end",
"def show_due_today\n task = Task.where(due_date: Date.today).all\nend",
"def comments\n # event_ids = events(@user).pluck(:id)\n # Comment.where(commentable_type: \"Event\", commentable_id: event_ids, program_favorite: true)\n # events(@user).unscope(:order).order(start_date_time: :asc).each { |event| event.map { |event| event.favorite_comments.first } }\n events(@user).map { |e| e.comments.select { |c| c.event_favorite == true }.first(2) }.flatten\n end",
"def week\n created_at.beginning_of_week\n end",
"def due_today\n @due_date_task_list.select { |duedatetask| (duedatetask.status == \"incomplete\") && (duedatetask.due_date == Date.today) }\n end",
"def aging_weeks__c\n if %w(1 true).include?(is_closed.to_s.downcase)\n (last_modified_date.to_i-created_date.to_i)/(60*60*24*7)\n else\n (Time.now.to_i - created_date.to_i)/(60*60*24*7)\n end\n end",
"def weekly_plan_workouts(todays_date) \n # lookup week day value of today's workout (name/description) by plan_day.\n # (for example, plan_day:103 is held on week_day:0 (Sunday))\n plan_week_day = current_user.training_cycles.last.plan.workouts.find_by(plan_day:plan_day(todays_date)).week_day\n\n # returns plan_day numbers for beginning and end of week surrounding the plan_day to create a weekly range from which to pull workouts\n # (if plan_day = 10, then week_start (1) & week_end (7) are plan_day:6 & 12 respectively)\n plan_week_start = (plan_day(todays_date) + 1) - plan_week_day\n\t\tplan_week_end = plan_day(todays_date) + (7 - plan_week_day)\n\n\t\t@workouts = current_user.training_cycles.last.plan.workouts.includes(:workout_type).where(plan_day:plan_week_start..plan_week_end).order(\"plan_day\")\n\tend",
"def calculate_week_time\n week_begin = DateTime.now.utc.beginning_of_week\n deadline = DateTime.now.utc.end_of_week\n self.commits.where(\"begin_time > ? and end_time < ?\", week_begin, deadline).sum(:spent_time)\n end",
"def week\n first_day_of_week = @date.monday\n \n days_of_week = []\n 7.times do |time|\n days_of_week << day_and_types(first_day_of_week + time.days)\n end \n \n days_of_week\n end",
"def get_comments\n return Comment.where(design_problem_id: self.id).sort_by! { |x| x.created_at }.sort! { |a,b| b.created_at <=> a.created_at }\n end",
"def regular_comments\n Comment.find(:all, :order => \"created_at DESC\", :conditions => [\"commentable_id = ? AND commentable_type = ? AND comment_type < ?\", id, 'Screen', 5])\n end",
"def breast_feeding_times_today\n breast_feedings.where(\"created_at >= :start_at AND created_at <= :end_date\", {start_at: Time.now.beginning_of_day, end_date: Time.now}).count\n end",
"def client_tasks_overdue\n self.find_all {|e| (e.completed.nil? or !e.completed) and e.complete_by < Time.zone.now }\n end",
"def upcoming_calendar_events\n self.calendar_events.joins(:event).where(\"DATEDIFF(`start_time`,?) >= 0\", Time.now.beginning_of_day)\n end",
"def comments_after(comments, time)\n valid_comments = []\n comments.each do |comment|\n if Time.parse(comment['updated_at']) > time\n valid_comments << comment\n end\n end\n\n valid_comments\n end",
"def weekdays_ago(time = ::Time.now)\n # -5.weekdays_ago(time) == 5.weekdays_from(time)\n return self.abs.weekdays_from(time) if self < 0\n \n x = 0\n curr_date = time\n\n until x == self\n curr_date -= 1.days\n x += 1 if curr_date.weekday?\n end\n\n curr_date\n end",
"def new_replies\n last_comment_view.nil? ? replies : replies.find(:all, :conditions => [\"thing_comments.created_at > ?\", last_comment_view])\n end",
"def day_link\n h.link_to_unless_current('24小时内热评', {action: :most_replied, id: object.id, limit: 'day'})\n end",
"def load_unread_comments\n @unread_comments = Comment.unread_by(current_user)\n if current_user.student?\n @unread_comments = @unread_comments.select do |comment| \n comment_parent = comment.commentable_type.classify.constantize.find(comment.commentable_id)\n comment_parent_type = comment.commentable_type.underscore\n if comment_parent_type == \"school_day\"\n comment_parent.calendar_date <= Date.today\n else\n comment_parent.school_days.order(\"calendar_date\")[0].calendar_date <= Date.today if !comment_parent.school_days.empty?\n end\n end\n end\n @unread_comments.sort_by!(&:created_at).reverse!\n end",
"def completed_weekly_tasks\n self.weekly_tasks.where(\n :is_weekly_attendance_marking_done => true ,\n :is_weekly_payment_collection_finalized => true, \n :is_weekly_payment_approved_by_cashier => true \n )\n end",
"def last_7_day_kwh_usage_by_day\n usage_data = []\n Time.now.utc.to_date.downto(6.days.ago.utc.to_date).each do |date|\n usage_data << total_day_kwh_usage_on(date).round(2)\n end\n usage_data\n end",
"def top_comm\n\t\t\t@commenters = User.all.order('comments.id DESC').joins('LEFT JOIN comments ON \n\t\t\t\tcomments.user_id == users.id').group('users.id').where('comments.created_at >= ?',\n\t\t\t\t1.week.ago.utc).limit(10)\n\t\tend",
"def upcoming_sport_sessions\n sport_sessions_confirmed.where('date >= ?', Date.today).limit(3) #TODO: maybe we should use DateTime.now or Date.now\n end",
"def biblreview_postponed_until\n postpone_date = postpone_dates.where(deleted_at: nil).where(\"postponed_until > (?)\", DateTime.now).first\n if postpone_date\n return postpone_date.postponed_until\n else\n return nil\n end\n end",
"def w_day; end",
"def seven_days\n @transactions = where(\"created_at >= ?\", (Time.now - 7.days))\n end",
"def weekdays_ago(date = ::Time.now.to_date)\n x = 0\n curr_date = date\n until x == self\n curr_date = curr_date - 1.days\n x += 1 if curr_date.weekday?\n end\n \n curr_date\n end",
"def trucks_open_today\n trucks = self.class.get_truck_data\n date = self.class.get_time\n\n trucks_today = []\n trucks.each do |truck|\n if date.strftime(\"%A\") == truck[\"dayofweekstr\"]\n trucks_today << truck[\"applicant\"]\n end \n end\n p trucks_today\n end",
"def recent_activity\n\t\t(Comment.all + Content.all + Mood.all + Project.all + Milestone.all + Form.all + Client.all + Contact.all).sort_by {|x| x[:updated_at]}.reverse\n\tend",
"def week_from_today\n ::Date.today + 7\n end",
"def get_jobs_last_7_days()\n @data = get_all()\n now = Time.now.to_date\n jobs = []\n for job in @data[\"jobs\"]\n parsed_date = Date.strptime(job[\"date\"], \"%d/%m/%Y\")\n days_difference = (parsed_date - now).to_i\n if days_difference <= 7 && days_difference >= 0\n jobs.push(job)\n end\n end\n return jobs\n end",
"def commits_in_week(date)\n start_date = date.beginning_of_week.beginning_of_day\n end_date = date.end_of_week.end_of_day\n return commits_between(start_date, end_date)\n end",
"def last_comment\n self.comments.order(:timestamp_creation => :asc).last\n end",
"def index\n @user = current_user\n @news_items = NewsItem.find(:all,:conditions=>\"created_at > '#{1.week.ago}'\", :order=>\"created_at desc\")\n @recent_tickets = Ticket.find(:all, :conditions=>\"status != 'closed' and status != 'resolved' and created_at > '#{5.days.ago}'\",:limit=>10)\n end",
"def calendar_wdays(starting_day = 0)\n start_week = Date.today.beginning_of_week + (starting_day - 1).days # In rails week start in monday and monday.wday is 1\n (start_week...start_week+7.days).collect { |day| I18n.l(day, :format => '%A') }\n end",
"def day_in_week_str\n\t\tdays = []\n\t\ttoday = Time.now\n\t\tdays.push(formatted(today))\n\t\ttoday_week_day = today.wday\n\t\tanothers = (0..6).select {|e| e != today_week_day}\n\t\tanothers.map do |e|\n\t\t\tdays.push(formatted(Time.at(today.to_i - e*day_second)))\n\t\tend\n\n\t\tdays.sort\n\tend",
"def daily_morning\n logger.info \" daily_morning\"\n run('Newsletter', :send!)\n end",
"def latest_comments\n self.comments.order(created_at: :desc)\n end",
"def days_past_in_week\n to_date.cwday\n end",
"def weekly_event_schedule\n utc_time = Time.now.utc\n\n # find TZs where it's ~08:45 on a Monday morning\n matching_tzs = TriggerService.monday_morning_timezones(utc_time)\n\n # create app events for each TZ\n robot = User.find(1)\n matching_tzs.each do |tz|\n next unless User.where(time_zone: tz.name).count > 0\n\n meta_data = { \n time_zone: tz.name, \n processor: 'Ns2::Processors::ScheduledNotificationsProcessor', \n utc_run_time: utc_time\n }\n # robot is dummy data, as this doesn't quite fit our model.\n AppEventService.create(robot, robot, \"weekly_event_schedule\", meta_data)\n end\n end",
"def weekly\n render json: Question.active.weekly.not_seen(@api_current_user.id).first\n end",
"def current_week_tasks\n @current_week_tasks = {monday:[], tuesday:[], wednesday:[], thursday: [], friday: [], saturday:[], sunday:[]}\n end",
"def start_date\n created_at.monday - 1.week - 1.day\n end",
"def get_posts\n posts = Post.order(\"post_date DESC\").where('post_date <= ?', Time.now)\n posts = posts.limit(count_limit) unless count_limit.blank?\n posts = posts.where(\"posts.post_date >= ?\", Time.now - past_days_limit.days) unless past_days_limit.blank?\n posts = posts.where(\"posts.blog_id IN (?)\", [blog.id] + blog_ids )\n end",
"def calc_content_updated_on\n time = created_on\n unless photos.empty? \n time = photos.last.created_on\n photos.each do |photo|\n unless photo.comments.empty?\n time = photo.comments.last.created_on if photo.comments.last.created_on > time\n end\n unless photo.likes.empty?\n time = photo.likes.last.created_on if photo.likes.last.created_on > time\n end\n end\n\n end\n invites.last.created_on > time ? invites.last.created_on : time\n end",
"def current_week\n\t\t#TODO\n\tend",
"def todays_status(user)\n goal = Goal.where(\"created_at >= ? AND user_id = ?\", Date.today.beginning_of_day, user.id).last\n if goal.nil?\n return nil \n elsif goal.content == \"\"\n return nil\n else\n return goal.content\n end\n end",
"def calculate_week_meeting_dates\n meetings = []\n @timeline_week_count.times do |wk|\n week_start = @beginning_of_first_week + wk.weeks\n # This excludes Sunday, putting the end of the week at Saturday.\n week_end = week_start.end_of_week(:sunday)\n week_mtgs = []\n @meeting_dates.each do |meeting|\n next if (meeting < @course.timeline_start) || (@course.timeline_end < meeting)\n week_mtgs << meeting if date_is_between(meeting, week_start, week_end)\n end\n meetings.push week_mtgs\n end\n meetings\n end",
"def week; end",
"def since_launch\n t_start = Time.parse(\"November 1, 2008\")\n t_current = t_start\n t_end = Time.today\n stats = []\n while (t_current < t_end) do\n puts \"Getting stats for #{t_current.to_s(:date_only)}\"\n stats << \"#{t_current.strftime('%m/%d/%Y')}, #{User.count(:conditions => ['activated_at >= ? and activated_at < ?', t_current, t_current + 1.day])}\"\n t_current += 1.day\n end\n puts stats\n end",
"def todays_recurrences\n today = Time.zone.today\n Recurrence.where('recurrences.date = ? OR recurrences.date ISNULL', today)\n end",
"def recent_weather_reports(today = Date.today)\n recent_days = (1..3).collect {|i| today - i}.collect {|d| \"#{d.year}#{d.month.to_s.rjust(2,'0')}#{d.day.to_s.rjust(2,'0')}\"}\n weather_reports.where(:date => recent_days)\n end",
"def soon range=4.days\n schedules.where(:when.lte => range.from_now)\n end",
"def completed_tasks_by_date(list,date)\n # helpers.is_today?(date)\n if (Date.today == date)\n if (list.id == self.all_task.id)\n self.completed_tasks.where([\"DATE(completed_at)=?\", date] ).order('completed_at')\n # self.completed_tasks.where([\"DATE(completed_at) BETWEEN ? AND ?\", date - 1.day , date] ).order('completed_at')\n else\n self.completed_tasks.where([\"list_id=? and DATE(completed_at)=?\",list.id, date] ).order('completed_at DESC')\n # self.completed_tasks.where([\"list_id=? and DATE(completed_at) BETWEEN ? AND ?\",list.id, date - 1.day , date] ).order('completed_at DESC')\n end\n\n else\n if (list.id == self.all_task.id)\n self.completed_tasks.where([\"DATE(completed_at) =?\",date] ).order('completed_at DESC')\n # self.completed_tasks.where([\"DATE(completed_at) =?\",date - 1.day] ).order('completed_at DESC')\n else\n self.completed_tasks.where([\"list_id=? and DATE(completed_at) =?\",list.id, date] ).order('completed_at DESC')\n # self.completed_tasks.where([\"list_id=? and DATE(completed_at) =?\",list.id, date - 1.day] ).order('completed_at DESC')\n end\n\n end\n end",
"def recent_comments\n get_or_make_references('Comment', @data, 'recent_comment_ids')\n end",
"def recent_comments\n get_or_make_references('Comment', @data, 'recent_comment_ids')\n end",
"def comments\n posts.where(:floor.gt => 0)\n end",
"def create_scheduled_notifications\n t = Time.now.utc\n x = self.notifications_created_since_recurrence_last_updated_count\n \n while (d = recurrence[x]) && t >= d\n create_notification_for_date(d)\n \n self.notifications_created_since_recurrence_last_updated_count += 1\n self.save!\n \n x += 1\n end \n end",
"def scheduled_at_date\n (model.scheduled_at || Time.zone.now + 7.days).strftime(\"%m/%-d/%Y\")\n end",
"def get_data_words_this_week\n words_per_day = Array.new( 7, 0 )\n \n # Get the current day of week\n dateCurrent = DateTime.now\n dayCurrent = dateCurrent.wday().to_i\n \n # Get date at the start of the week\n timeStart = Time.utc( dateCurrent.year, dateCurrent.month, dateCurrent.day )\n timeStart = timeStart - 24 * 60 * 60 * dayCurrent\n \n # Get date at the end of the week\n timeEnd = Time.utc( dateCurrent.year, dateCurrent.month, dateCurrent.day )\n timeEnd = timeEnd + 24 * 60 * 60 * ( 7 - dayCurrent )\n \n # Loop through entries and increment totals\n user_entries = Entry.find( :all, :conditions => [\"userid = #{id}\"] )\n \n user_entries.each do |entry|\n if( entry.words.nil? || entry.hours.nil? || entry.minutes.nil? )\n next \n end\n \n dayEntry = entry.starttime.wday().to_i\n timeEntry = Time.utc( entry.starttime.year, entry.starttime.month, entry.starttime.day, entry.starttime.hour, entry.starttime.min )\n \n if( timeStart.to_i <= timeEntry.to_i && timeEnd.to_i >= timeEntry.to_i )\n words_per_day[dayEntry] += entry.words\n end\n end\n \n # Assemble Data String\n data_string = \"\"\n \n (0..(words_per_day.length - 1)).each do |i|\n data_string = data_string + words_per_day[i].to_s\n if( i < words_per_day.length - 1 )\n data_string = data_string + \",\"\n end\n end\n \n return data_string\n end",
"def day_of_week\n\tif @current_time.wday == 0 || @current_time.wday == 6\n\t\tweek_period = \"Weekends\"\n\telse\n\t\tweek_period = \"Weekdays\"\n\tend\nend",
"def cweek\n end",
"def last_week\n Date.week(Date.today - 7)\n end",
"def get_pull_requests(days=7)\n update_pull_requests if @pull_requests.nil?\n @pull_requests.take_while do |pr|\n pr.created_at.to_date > Date.today - days\n end\n end",
"def next_weeks_open_house_dates(open_house_dow)\n now = Time.now\n dates = []\n (0...7).each do |day_offset|\n days = day_offset*24*60*60\n date = now + days\n if open_house_dow.include?(date.wday)\n dates << now + days\n end\n end\n dates\n end",
"def weekday(days)\nt = Date.today\narr = []\n days.times do\n arr << \"ok\" if t.saturday? || t.sunday?; t = t - 1\n end\n arr.count\nend",
"def sunday_after days_ago=0\n d = Date.today - days_ago\n until d.sunday?\n d += 1\n end\n d\nend",
"def dueToday\n dueTodayList = \"\"\n @toDoListArray.each do |e|\n if !e.status && e.dueDate == Date.today\n dueTodayList = dueTodayList + e.printItem + \" \"\n end\n end\n dueTodayList\n end",
"def object_created_at_timestamp_in_days(news, time)\n news.created_at > time.days.ago\n end",
"def check_for_replies(collection, days=100)\n collection.all.each do |document|\n title = document.respond_to?(:title) ? :title : :subject\n# p collection.class, document[title]\n document.dc_replies.each do |reply|\n if reply.created_at > days.days.ago\n p [document[title], reply.subject, reply.created_at]\n# reply.delete\n end\n end\n end\nend",
"def format_body\n #convert to ISO date\n d = ((@first_wkday_of_month+5) % 7) + 1\n\n #reset for Sunday\n if d == 7\n d = 0\n end\n\n #returns very long array\n padding = Array.new(d, nil)\n result = (padding + array_of_days).each_slice(7).to_a\n until result.length == 6\n result << [nil]\n end\n result\n end",
"def working_day?\n !week_end?\n end",
"def notes_to_self(user_id)\n Comment.find(:all, :order => \"created_at DESC\", :conditions => [\"commentable_id = ? AND commentable_type = ? AND comment_type = ? AND user_id = ?\", id, 'Screen', 6, user_id])\n end",
"def all_potential_meetings\n meetings = []\n day_meetings.each do |day|\n @timeline_week_count.times do |wk|\n meetings << (@beginning_of_first_week + wk.weeks).date_of_upcoming(day)\n end\n end\n meetings.sort\n end",
"def list_of_days_worked_out\n array = []\n workouts = Workout.all.find_all do |workout| \n workout.user_id == self.id \n end\n dates = workouts.each do |workout|\n array << workout.date\n end\n array\n end",
"def due_later\n TodosList.new(@todos.filter { |todo| todo.due_later? })\n end"
] | [
"0.6157928",
"0.615391",
"0.6051672",
"0.5840118",
"0.5839378",
"0.5815161",
"0.5771202",
"0.5749192",
"0.57202655",
"0.5713093",
"0.56862354",
"0.56813926",
"0.56672066",
"0.56672066",
"0.56655174",
"0.56523615",
"0.56456274",
"0.561695",
"0.56120646",
"0.56053054",
"0.5597576",
"0.555855",
"0.55207276",
"0.551349",
"0.5510232",
"0.5493437",
"0.54810715",
"0.54717183",
"0.5470603",
"0.5463773",
"0.5456194",
"0.5445091",
"0.54386276",
"0.54314137",
"0.5426091",
"0.5425099",
"0.54163617",
"0.5406665",
"0.54035723",
"0.5397954",
"0.5382674",
"0.53783506",
"0.53779465",
"0.537232",
"0.53696007",
"0.5357443",
"0.53563476",
"0.5354428",
"0.53457874",
"0.5340404",
"0.5324999",
"0.5323848",
"0.53237075",
"0.53228694",
"0.53175366",
"0.5315426",
"0.5312409",
"0.5309126",
"0.530676",
"0.53039867",
"0.5302915",
"0.5296991",
"0.5292248",
"0.5291685",
"0.527506",
"0.5274722",
"0.52729094",
"0.52702016",
"0.52650446",
"0.5263262",
"0.52631867",
"0.5261011",
"0.52577037",
"0.5252043",
"0.5248905",
"0.5238338",
"0.5232013",
"0.5230503",
"0.52289736",
"0.52289736",
"0.5209451",
"0.5206633",
"0.5206381",
"0.5203668",
"0.5203311",
"0.5191148",
"0.51720077",
"0.5169931",
"0.51681",
"0.5164549",
"0.5148646",
"0.5147257",
"0.51382536",
"0.51378036",
"0.5137312",
"0.5129391",
"0.51288706",
"0.5124577",
"0.51179177",
"0.5114234"
] | 0.52882737 | 64 |
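The digest_posts record above inlines its cutoff rule: look back three days when run on a Monday (to cover the weekend), otherwise one day. Extracting that into a helper makes the rule easier to test; a sketch under the same assumption (method name is illustrative, not from the record):

    # Midnight cutoff for a daily digest: Friday 00:00 when run on a Monday,
    # otherwise the previous day's midnight.
    def digest_cutoff(now = Time.now)
      days_back = now.monday? ? 3 : 1
      Time.new(now.year, now.month, now.day) - days_back * 24 * 60 * 60
    end

    digest_cutoff(Time.new(2024, 1, 8))   # Monday  -> 2024-01-05 00:00 (Friday)
    digest_cutoff(Time.new(2024, 1, 9))   # Tuesday -> 2024-01-08 00:00 (Monday)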
Should change to this def photo_url if photo.present? photo.url else 'defaultuser.jpg' end end | def forum_name
if social_profile
social_profile.public_nickname
else
SocialProfile.get_anonymous_name(email)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def user_image_url # return the user image\n ((self.user_image.present?)?(self.user_image.url.to_s):nil) || self.photo_url || \"/assets/no-image.png\"\n end",
"def photo(if_not_found = \"anonymous.jpg\" )\n photo_name.nil? ? if_not_found : photo_name\n end",
"def image_url\n# Somehow the picture field returns false for 'nil?' regardless of wether it's set or not\n# So we'll have to check if the size of the url is 0 to see if it exists\n# There should be a better way to do this, probably?\nif self.picture.size == 0\n\tself.profile_picture.nil? ? 'image-unknown.jpg' : self.profile_picture\nelse \n\tself.picture.ava\nend\nend",
"def photo_url\n defined?(object.photo) ? object.photo.url : ''\n end",
"def get_profile_photo_url(photo)\n if photo != nil\n return photo.image.url(:thumb)\n else\n return 'user.png'\n end\n end",
"def profile_photo\n photos = self.profile_photos\n photos.empty? ? 'http://ragatzi.s3.amazonaws.com/uploads/profile_default_1.png' : photos.last.photo.url\n end",
"def photo_src\n self.photo.present? ? self.photo.picture_url : Prize::DEFAULT_IMG\n end",
"def full_picture\n if self.photo.file != nil\n self.photo_url\n elsif self.picture\n self.picture\n else\n \"http://res.cloudinary.com/geertkeularts/image/upload/v1467110899/defaultpicture_jj0nwa.jpg\"\n end\n end",
"def picture\n if self.avatar?\n self.avatar.url\n elsif self.image_url != nil\n self.image_url\n else\n 'generic_avatar'\n end\n end",
"def user_cover_photo(user)\n user.cover_photo.present? ? user.cover_photo.photo.url : \"default-cover-image.jpg\"\n end",
"def image_url\n image.presence or (email.present? and gravatar_url) or Guest::IMAGE_URL\n end",
"def image_url\n model.image_url.presence || h.asset_path('face.jpg')\n end",
"def cl_photo\n if photo?\n photo\n else\n 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSb0kZ2CA6D46RXZVJ3dvJPusC66fBq1uENE8jN3q8golJ73Ayd'\n end\n end",
"def image_url\n image.present? ? image.url : ''\n end",
"def image_url\n album_cover ? album_cover.photo.photo_url_full : photos.size > 0 ? photos.first.photo_url_full : nil\n end",
"def default_photo\n group.default_profile_persona==1 ? pers_photo : prof_photo\n end",
"def default_image\n \t @default_photo = PhotoUser.where('user_id = ? AND first_image = 1 AND image IS NOT NULL',self.id)\n \t if @default_photo.count > 0\n \t\t@mydefault_photo = PhotoUser.find(@default_photo[0].id)\n \t\treturn @mydefault_photo\n \t else\n \t\treturn nil\n \t end\n end",
"def get_user_image(user,size)\n if img=user.image\n img.photo.url(size)\n else\n '/images/default-user-image-'+size.to_s+'.jpg'\n end\n end",
"def profile_image(id)\n unless id.blank?\n profile = Profile.find_by_user_id(id)\n if !profile.profile_image.blank?\n return url_for_file_column(profile, \"profile_image\",\"submain\")\n else \n return \"/images/home/noprofile_photo.gif\"\n end\nend\nend",
"def choosephoto\n if self.photo.url\n photo.url(:standard)\n else\n gphoto\n end\n end",
"def logo_url\n logo? ? logo.url : profile_img\n end",
"def display_profile_image\n if self.picture.present? && self.picture.url(:small).present?\n self.picture.url(:small)\n else\n ActionController::Base.helpers.asset_path('user.png')\n end\n end",
"def getImage(type = nil)\n image = \"\"\n if !self.photo.blank?\n if !type.nil?\n image = self.photo.url(type)\n else\n image = self.photo.url\n end\n else\n image = \"person-woman-coffee-cup-large.jpg\"\n end # if !self.photo.blank? #\n end",
"def profile_photo_url(version)\n if self.photos.profile_photo.blank? \n \"#{version.to_s}_avtar.jpg\"\n else \n self.photos.profile_photo.first.name_url(version)\n end \n end",
"def default_url\n \"photo/#{version_name}.jpg\"\n end",
"def imgurl\n picref.imgurl if picref\n end",
"def profile_photo\n\tend",
"def user_avatar(user)\n user.avatar ? user.avatar.photo.url(:thumb) : \"default-avatar.jpg\"\n end",
"def image_if_present\n image? ? image : nil\n end",
"def avatar_url\n uploaded ? avatar.url : \"/assets/no_avatar.png\"\n end",
"def profile_pic_default\n if profile_pic.attached? \n profile_pic.variant(resize: \"150x150!\").processed\n else \n \"/default_profile.jpg\"\n end\n end",
"def url() processed_image.try(:url) || fallback_url end",
"def get_image_or_placeholder\n return (self.image != \"\") && (self.image != nil) ? self.image : \"http://cdn.browshot.com/static/images/not-found.png\"\n end",
"def get_image_or_placeholder\n return (self.image != \"\") && (self.image != nil) ? self.image : \"http://cdn.browshot.com/static/images/not-found.png\"\n end",
"def user_img_URL\n self.user.image_url\n end",
"def user_avatar_url(user = nil, variant:)\n if user.nil? || Rails.application.show_dummy_image?\n UsersHelper.no_avatar_path\n else\n user.avatar_image_location(variant: variant) || UsersHelper.no_avatar_path\n end.to_s\n end",
"def default_profile_pic\n profile_pics.where(is_default: true).first || ProfilePic.new\n end",
"def image_url\n url_for(object.profile_pic)\n end",
"def default_photo\n default_photo = image_tag(\"default-avatar.png\", :alt => \"Photo\", :size => \"180x200\")\n end",
"def og_image\n if @user && @user.image?\n @user.image.url\n elsif @person && @person.image?\n @person.image\n else\n root_url.chomp('/') + image_path('logo.png')\n end\n end",
"def view_photo(object,type_of_avatar,thumbnail = nil)\n default = case type_of_avatar\n when \"people\"\n then image_tag('thumb_user.jpg') \n when \"community\" \n then image_tag(\"community.jpg\")\n when \"event\" \n then image_tag(\"event_default.gif\", :size => \"110x80\")\n end\n if object\n image_tag(object.public_filename(:thumb)) unless thumbnail == nil\n image_tag(object.public_filename) if thumbnail == nil\n else\n default \n end\n rescue ActionController::RoutingError => e\n default\n end",
"def image_url\n if image.present?\n image\n else\n \"http://loremflickr.com/320/240/#{CGI.escape name}\"\n end\n end",
"def get_image_profile_pic\n last_pic = images.where(\"kind = ?\", \"profile_pic\").last\n\n if last_pic.nil?\n return \"/assets/octacat-resized.png\"\n else\n last_pic.url\n end\n end",
"def default_avatar\n not object.avatar.present?\n end",
"def thumbnail_url\n album_cover ? album_cover.photo.photo_url_small : photos.size > 0 ? photos.first.photo_url_small : nil\n end",
"def profilepicture\n if portfolio\n portfolio.photo\n end\n end",
"def imgdata fallback_to_url=true\n if picref && (href = picref.imgdata || (fallback_to_url && picref.url)).present?\n href\n end\n end",
"def default_url\n Settings.image.default_avatar\n end",
"def default_url\n Settings.image.default_avatar\n end",
"def default_url\n Settings.image.default_avatar\n end",
"def base_photo_url\n \"#{URL}#{self.photo.url}/\".gsub('/original/','/large/')\n end",
"def image\n if File.exist?(\"public/images/profile_images/#{self.username}.jpg\")\n img_path = \"/images/profile_images/#{self.username}.jpg\"\n else\n img_path = \"/images/default.jpg\"\n end\n return img_path\n end",
"def no_link_picture(instance)\n if instance.picture.present?\n image_tag(instance.picture.image.url, class: 'img-responsive')\n elsif instance.class == User\n image_tag('/images/default.png', class: 'img-responsive')\n else\n image_tag('/images/default2.jpg', class: 'img-responsive')\n end\n end",
"def safe_default_image\n if default_image.try(:filename).present? \n default_image\n elsif self.images.present?\n self.images.first\n else\n Product::generic_default_image\n end\n\n # To populate run task: assets::populate_default_image\n #default_image.presence || Product::generic_default_image\n end",
"def check_for_default_pic\n make_default! if profile.default_profile_pic.new_record?\n end",
"def default_url\n \"/images/user/avatar/\" + [version_name, \"default.png\"].compact.join('_')\n end",
"def photo_url\n url = read_attribute(\"photo_url\")\n if ! url.blank?\n url.gsub(/\\/images\\//,\"http:\\/\\/s3.amazonaws.com\\/reoagentphoto\\/\")\n else\n \"\"\n end\n end",
"def avatar_url\n return image_url if image_url\n return DEFAULT_AVATAR if email.nil?\n\n gravatar_url\n end",
"def author_picture_url\n @author_picture_url ||= begin\n if self.author_screenname\n \"http://twitter.com/api/users/profile_image/#{self.author_screenname}\"\n else\n image_path(\"default-avatar.png\")\n end\n end\n end",
"def get_pic_url\n\t if !self.pic_url\n\t\t twit_u = User.get_twitter_user_from_name(self.screen_name)\n\t\t\tputs twit_u\n\t\t\tself.pic_url = twit_u[\"profile_image_url\"]\n\t\t\tself.save!\n\t\tend\n\tend",
"def avatar_url\n if !self.fb_id.blank?\n return 'https://graph.facebook.com/%s/picture?width=90&height=90' % self.fb_id\n elsif !self.gpp_id.blank?\n return 'http://profiles.google.com/s2/photos/profile/%s?sz=90' % self.gpp_id\n else\n return ''\n end\n end",
"def cover_photo(size)\n\n \tif self.photos.length > 0\n \t\tself.photos[0].image.url(size)\n\n \telse \n \t\t\"blank.jpg\"\n \tend\n\n end",
"def author_avatar\n anonymous? ? Avatar.default.url : user.profile.avatar.url\n end",
"def photo_from_url(url)\n end",
"def picurl\n picurl = object.imgurl\n (picurl.present? && sample_page) ? valid_url(picurl, sample_page) : picurl\n end",
"def facebook_photo\n @fb_photo ||= backup_photo.photo rescue nil\n end",
"def blank_profile_pic_url(format = 'tiny')\n \"/images/profile_pictures/profile_blank_#{format}.png\"\n end",
"def image_url\n self.data['image_url'] || nil\n end",
"def small_photo_url(photo)\n server = photo.attribute('server')\n id = photo.attribute('id')\n secret = photo.attribute('secret')\n return \"http://static.flickr.com/#{server}/#{id}_#{secret}_m.jpg\"\nend",
"def photo_url(size = 500)\n photos.present? && photos.first.photo_url(size)\n end",
"def pic_one_url\n \tpic_one.url(:medium)\n end",
"def avatar_url_for(person, options = {})\n person.nil? ? nil : url\n end",
"def profile_url\n if self.profile and self.profile != \"\"\n return self.profile\n end\n return \"/blank.png\"\n end",
"def photo_url\n src = details.at('#img_primary img')['src'] rescue nil\n\n if src =~ /^(http:.+@@)/ || src =~ /^(http:.+?)\\.[^\\/]+$/\n $1 + '.jpg'\n end\n end",
"def default_photo_url\n self.dig_for_string(\"agentSummary\", \"defaultPhotoURL\")\n end",
"def avatar_photo (person)\n if person.image.attached?\n image_tag person.image\n else\n image_tag \"static/placeholder.jpg\"\n end\n end",
"def image_url_provided?\n !self.image_url.blank?\n end",
"def default_image\n end",
"def default_url\n \"https://s3.amazonaws.com/whisprdev/uploads/default_avatar.png\"\n end",
"def ensure_default_pic\n if profile.default_profile_pic.new_record? and profile.profile_pics.count > 0\n profile.profile_pics.first.make_default!\n end\n end",
"def avatar_url\r\n return @user.avatar_url\r\n end",
"def image_path\n photo = album_cover ? album_cover.photo : photos.first\n if photo\n return \"/album/#{id}/photo/#{photo.id}\"\n else\n return nil\n end\n end",
"def add_default_profile_pic\n self.picture = Rails.root.join(\"app/assets/images/profile-pic.jpg\").open\n end",
"def picture_url\n if avatar\n image_path(\"speakers/#{avatar}\")\n else\n image_path(\"default-avatar.png\")\n end\n end",
"def photo_url\n gravatar_id = Digest::MD5.hexdigest(email.downcase)\n \"https://gravatar.com/avatar/#{gravatar_id}.png\"\n end",
"def author_avatar\n is_anonymous ? Avatar.default.url : user.profile.avatar.url\n end",
"def goods_image\n return params[:image_url] if params[:image_url].present?\n return params[:based64_image] if params[:based64_image].present?\n @image.present? ? @image : 'no_pic.png'\n end",
"def og_image(image)\n\t\tbase_image = \"http://i.imgur.com/DfYlA.png\"\n\t\tif image.empty?\n\t\t\tbase_image\n\t\telse\n\t\t\timage\n\t\tend\n\tend",
"def showphoto(gender) \n @photo = gender == \"Male\" ? \"headshot_male.jpg\" : \"headshot_female.jpg\"\n end",
"def photo_path\n if doctor_image\n doctor_image.public_filename\n else\n self.doctor.photo_path\n end\n end",
"def get_thumbnail_url\n nil\n end",
"def default_url\n \"http://placehold.it/330&text=pic\"\n end",
"def preview_image_url\n nil\n end",
"def photo_for_user(user=nil)\n return photos.last\n photo = \n unless photos.empty? \n if not (took_photos = photos.by(user)).empty?\n took_photos.last\n elsif not (in_photos = photos.of(user)).empty? \n in_photos.last\n else \n photos.last\n end\n else\n nil\n end\n end",
"def img\n if self.upload.try(:image).present?\n self.upload.try(:image).url(:thumb)\n else\n 'images/cmn1.jpg'\n end\n end",
"def process_pic(handle, pic)\n # if the passed pic is blank or empty, then call the tc api\n if !pic || pic.empty?\n begin\n response = HTTParty.get(\"http://api.topcoder.com/v2/users/#{URI.escape(handle)}\")\n # if we got 404 or their profile pic is also blank, default one in\n if response.code == 404 || response['photoLink'].empty?\n pic = 'http://www.topcoder.com/wp-content/themes/tcs-responsive/i/default-photo.png'\n elsif response['photoLink'].start_with?('http://')\n pic = response['photoLink']\n else\n pic = \"http://community.topcoder.com#{response['photoLink']}\"\n end\n rescue\n # if the topcoder api just dies a horrible death then use the default pic\n pic = 'http://www.topcoder.com/wp-content/themes/tcs-responsive/i/default-photo.png'\n end\n\n end\n pic\nend",
"def photo_from_url(url)\n\t self.photo = URI.parse(url)\n\tend",
"def default_url\n ActionController::Base.helpers.image_path('user.png')\n end",
"def picture(options = nil)\n\t\tsize = \"\"\n\t\tsize = options[:size] if options != nil\n\t\ts = image.to_s\n\t\t\n\t\t# no image source\n\t\treturn User.default_pic(size) unless s.present?\n\t\t\n\t\t# image source is gravatar\n\t\tif [:gravatar, :identicon, :monsterid, :wavatar, :retro].include? s.to_sym\n\t\t\ts = s == 'gravatar' ? :mm : s.to_sym\n\t\t\treturn gravatar(s)\n\t\t\t\n\t\t# image source is something else\n\t\telse\n\t\t\tl = self.links.find_by(provider: s)\n\t\t\treturn User.default_pic unless l\n\t\t\tpic = l.picture\n\t\t\treturn User.default_pic unless pic\n\t\t\treturn pic\n\t\tend\n\t\t\n\t\t# should we really ever reach this point?\n\t\treturn User.default_pic(size)\n\tend",
"def photo_filename\n url = read_attribute(\"photo_url\")\n if ! url.blank?\n url.gsub(/\\/images\\//,\"\")\n else\n \"\"\n end\n end",
"def profile_picture\n if facebook_authentication && facebook_authentication.profile_picture\n return facebook_authentication.profile_picture\n end\n return '/images/unknown_user.png'\n end"
] | [
"0.8395404",
"0.81150687",
"0.80105835",
"0.79694754",
"0.78429514",
"0.7836549",
"0.77101934",
"0.7705525",
"0.76152164",
"0.75577146",
"0.7523068",
"0.7510459",
"0.7479614",
"0.74502176",
"0.74408376",
"0.7410629",
"0.73853743",
"0.7384063",
"0.7348051",
"0.7314135",
"0.7301489",
"0.7287592",
"0.72302634",
"0.72059184",
"0.71079105",
"0.7106073",
"0.7102082",
"0.7069577",
"0.7057588",
"0.70263016",
"0.70261484",
"0.69888407",
"0.69640106",
"0.69640106",
"0.6938251",
"0.6926264",
"0.6903079",
"0.68814313",
"0.6875794",
"0.6867213",
"0.686585",
"0.68656725",
"0.6861243",
"0.6855111",
"0.68537694",
"0.68491876",
"0.68412954",
"0.6820797",
"0.6820797",
"0.6820797",
"0.6811799",
"0.6800968",
"0.6790434",
"0.67873466",
"0.67817837",
"0.67802984",
"0.677971",
"0.67761135",
"0.67735076",
"0.675365",
"0.67433614",
"0.67257136",
"0.6717733",
"0.6708666",
"0.66928715",
"0.6691477",
"0.6678676",
"0.66766983",
"0.66746384",
"0.66582674",
"0.6649585",
"0.66483706",
"0.66455793",
"0.6642216",
"0.66392374",
"0.663035",
"0.6617687",
"0.6609784",
"0.6603048",
"0.6602112",
"0.6583289",
"0.65673894",
"0.6554566",
"0.6519005",
"0.6515714",
"0.65098804",
"0.6505221",
"0.65006614",
"0.64846605",
"0.6484004",
"0.64824873",
"0.6475728",
"0.64745444",
"0.6470829",
"0.64663166",
"0.64532363",
"0.6452638",
"0.6440804",
"0.6438613",
"0.6438045",
"0.64372677"
] | 0.0 | -1 |
GET /qx/take_offs GET /qx/take_offs.json | def index
# @qx_take_offs = Qx::TakeOff.all
per_page = params[:per_page] || 100
@q = Qx::TakeOff.ransack(params[:q])
    @qx_take_offs = @q.result.paginate(page: params[:page], per_page: per_page)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_qx_take_off\n @qx_take_off = Qx::TakeOff.find(params[:id])\n end",
"def qx_take_off_params\n params.require(:qx_take_off).permit(:airport_id, :runway, :aircraft_type, :hirl_rcls, :hirl, :hirl_rcls_stop, :lights)\n end",
"def client_choose(offset = 10, limit = 20)\n response = Net::HTTP.get(\n URI(\"https://pokeapi.co/api/v2/pokemon/?offset=#{offset}&limit=#{limit}\")\n )\n \n JSON.parse(response)\nend",
"def update\n respond_to do |format|\n if @qx_take_off.update(qx_take_off_params)\n format.html { redirect_to @qx_take_off, notice: 'Take off was successfully updated.' }\n format.json { render :show, status: :ok, location: @qx_take_off }\n else\n format.html { render :edit }\n format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n runway = qx_take_off_params[:runway].split(\"/\")\n runway.each do |item|\n qx_take_off_params[:runway_id] = Qx::Runway.get_runay_id(qx_take_off_params[:airport_id], item)\n @qx_take_off = Qx::TakeOff.new(qx_take_off_params)\n end\n\n\n p runway\n\n respond_to do |format|\n if @qx_take_off.save\n format.html { redirect_to @qx_take_off, notice: 'Take off was successfully created.' }\n format.json { render :show, status: :created, location: @qx_take_off }\n else\n format.html { render :new }\n format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def trades\n Client.current.get(\"#{resource_url}/trades\")\n end",
"def index\n set_user\n @time_offs = TimeOff.all\n end",
"def show\n @time_off_request = TimeOffRequest.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @time_off_request }\n end\n end",
"def trip_purposes \n label = request_label(:purposes)\n \n @http_request_bundler.add(\n label, \n @url + \"/trip_purposes\", \n :get,\n head: headers,\n query: { provider_id: provider_id }\n ).response!(label)\n end",
"def destroy\n @qx_take_off.destroy\n respond_to do |format|\n format.html { redirect_to qx_take_offs_url, notice: 'Take off was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def show\n @take = Take.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @take }\n end\n end",
"def index\n respond_to do |format|\n format.html\n format.json {\n\n render :json => TimeOff.joins('LEFT OUTER JOIN request_types ON time_offs.request_type_id = request_types.id')\n .joins('INNER JOIN users ON time_offs.user_id = users.id')\n .select(\n 'time_offs.id,\n time_offs.request_start_date,\n time_offs.request_end_date,\n time_offs.status,\n time_offs.comments,\n users.name as users_name,\n request_types.name as request_type_name') }\n end\n end",
"def get_prefectures\n server_response = handle_timeouts do\n get '/1/neighborhoods.json?locale=en'\n end\n server_response['response']\n end",
"def index\n if(params[:mode] != nil && params[:mode] == \"run\")\n render :json => Playoff.exists?(:running => true)\n elsif(params[:running] != nil && params[:running])\n @playoffs = Playoff.where(running: true)\n else\n @playoffs = Playoff.all\n end\n\n end",
"def index\n @hold_requests = HoldRequest.all\n end",
"def fetchControlPoints(url, mapID)\n url = URI(url.to_s+'maps/'+mapID.to_s+'/control_points.json')\n response = Net::HTTP.get_response(url)\n data = response.body\n JSON.parse(data)\n end",
"def hours\n render json: Pings::Selector.new.hours(params)\n end",
"def index\n @intakes = Intake.all\n end",
"def trips\n get '/gtfs/trips'\n end",
"def test_that_you_may_filter_to_single_track\n getting '/v2/exercises?tracks=fruit'\n\n returns_tracks %w( fruit )\n end",
"def index\n @drop_offs = DropOff.all\n end",
"def get(incoming={})\n opts = HttpClient::Helper.symbolize_keys(incoming)\n query = {\n :guid => HttpClient::Preconditions.assert_class_or_nil('guid', HttpClient::Helper.to_uuid(opts.delete(:guid)), String),\n :user_guid => HttpClient::Preconditions.assert_class_or_nil('user_guid', HttpClient::Helper.to_uuid(opts.delete(:user_guid)), String),\n :service_key => HttpClient::Preconditions.assert_class_or_nil('service_key', opts.delete(:service_key), String),\n :limit => HttpClient::Preconditions.assert_class_or_nil('limit', opts.delete(:limit), Integer),\n :offset => HttpClient::Preconditions.assert_class_or_nil('offset', opts.delete(:offset), Integer)\n }.delete_if { |k, v| v.nil? }\n @client.request(\"/watches\").with_query(query).get.map { |hash| Apidoc::Models::Watch.new(hash) }\n end",
"def fetchStopPositions\n\t\tfetchUri(\"http://api.wmata.com/Bus.svc/json/JStops?&api_key=#{@@apiKey}\")\n\tend",
"def pokemon_api_caller\nresponse = RestClient.get \"https://pokeapi.co/api/v2/pokemon/?offset=0&limit=807\"\nresponse_JSON = JSON.parse(response)\nresponse_JSON[\"results\"]\nend",
"def index\n @cutoffs = Cutoff.all\n end",
"def history(params)\n Client.current.get(\"#{resource_url}/candles\", params)\n end",
"def trades(options)\n request :public, :get, :trades, options\n end",
"def index\n @takeouts = Takeout.all\n end",
"def time_trackings\n Easybill::Api::TimeTrackings\n end",
"def trends_available\n get(\"/trends/available.json\")\n end",
"def make_request_get_response_trend_availible\n @path_trend_availible = '/1.1/trends/available.json'\n @address_trend_availible = URI(\"#{@baseurl}#{@path_trend_availible}\")\n # Set up HTTP. Need ssL to make the connection\n @request_trend_availible = Net::HTTP::Get.new @address_trend_availible.request_uri\n @http = Net::HTTP.new @address_trend_availible.host, @address_trend_availible.port\n @http.use_ssl = true\n @http.verify_mode = OpenSSL::SSL::VERIFY_PEER\n # Issue the request.\n @request_trend_availible.oauth! @http, @consumer_key_country, @access_token_country\n @http.start\n @response_trend_availible = @http.request @request_trend_availible\n @response_trend_availible\n end",
"def get_recordings()\n @client.make_request(:get, @client.concat_user_path(\"#{CALL_PATH}/#{id}/recordings\"))[0]\n end",
"def trips\n @trip_requests = current_user.trip_requests.trips.paginate(page:params[:page], per_page:20)\n json_response(@trip_requests)\n end",
"def prepare_http_requests\n {\n label: :uber_prices,\n url: @uber_api_service.estimates_price_url([@trip.destination.lat, @trip.destination.lng], [@trip.origin.lat, @trip.origin.lng]),\n action: :get,\n options: {\n head: @uber_api_service.headers \n }\n }\n end",
"def interesting(options = {})\n response = Typhoeus::Request.get(\"#{DARKSKY_API_URL}/interesting/#{@api_key}\", DEFAULT_OPTIONS.dup.merge(options))\n JSON.parse(response.body) if response.code == 200 \n end",
"def index\n if params[:workpoint_id]\n @outs = Workpoint.find(params[:workpoint_id]).outs\n else\n @outs = Out.all\n end\n respond_to do |format|\n format.json {\n render :json => @outs, :layout => false\n }\n end\n end",
"def get_skus()\n\tputs \"Getting skus\"\n\tresponse = request_get(\"/api/sku\")\n\tputs response.body\nend",
"def index\n # @checkpoints = Checkpoint.all\n @route = Route.find(params[:route_id])\n @checkpoints = @route.checkpoints\n # render json: {\n # checkpoints: @checkpoints.to_a\n # }\n end",
"def events\n url = 'https://api.artic.edu/api/v1/exhibitions?limit=35'\n\n res = RestClient.get(url)\n JSON.parse(res)\nend",
"def get_api_results(_url)\n JSON.parse File.read('spec/inspector/stubbed_example.json')\n end",
"def index\n ok_request current_user, %w(user, opinion_polls, opinion_polls.time_slots)\n end",
"def retrieve_rates(date)\n path = \"http://openexchangerates.org/api/historical/#{date.to_s}.json?app_id=#{$app_id}\"\n response = Net::HTTP.get_response(URI.parse path)\n # TODO: error handling\n response.body\nend",
"def get\n appid = ENV['TRIMET_APP_ID']\n response = Unirest.get( \"http://developer.trimet.org/ws/v2/vehicles?appid=#{appid}\" )\n response.body\nend",
"def call_yts_api(count)\n\t \t\traw_data = RestClient.get BASE_URL, \n\t\t\t{ :params => \n\t\t\t\t{ :quality => '720p',\n\t\t\t\t :set => count,\n\t\t\t\t :limit => 50,\n\t\t\t\t :rating => 1,\n\t\t\t\t :sort => \"date\",\n\t\t\t\t :order => \"desc\" \n\t\t\t\t} \n\t\t\t}\n\t\t\tJSON.parse(raw_data)\n\t \tend",
"def index\n response = RestClient.get 'http://api.bitvalor.com/v1/order_book.json'\n data = JSON.parse(response.body)[\"bids\"]\n @fox = data.select {|element| element[0] == \"FOX\"}\n @b2u = data.select {|element| element[0] == \"B2U\"}\n @mbt = data.select {|element| element[0] == \"MBT\"}\n end",
"def index\n skickers = Skicker.order(power: :asc)\n render json: { status: 'SUCCESS', message: 'Loaded skickers', value: skickers }\n end",
"def state_events(state)\n api_return = RestClient.get('https://app.ticketmaster.com/discovery/v2/events.json?stateCode=' + state + '&apikey=' + $ticket_master_api_key)\n JSON.parse(api_return)\nend",
"def scrobble( params )\n LastFM.requires_authentication\n # Tracks are passed to the service using array notation for each of the above params\n array_params = {}\n params.each do |hkey, hval|\n hval = hval.to_i if hval.is_a?(Time)\n Array(hval).each_with_index do |aval, index|\n array_params[\"#{hkey}[#{index}]\"] = aval\n end\n end\n LastFM.post( \"track.scrobble\", array_params )\n end",
"def get_recordings(options = {})\n prepare\n @api.get_recordings(options)\n end",
"def index\n @cooking_times = CookingTime.all\n end",
"def\n\n# *********VENUE GET METHODS*********\n def get_till_receipts\n\n end",
"def prepare_http_requests\n {\n label: :lyft_prices,\n url: @lyft_api_service.price_url([@trip.destination.lat, @trip.destination.lng], [@trip.origin.lat, @trip.origin.lng]),\n action: :get,\n options: {\n head: @lyft_api_service.headers \n }\n }\n end",
"def get\n\t\t\t result = Status.find_by(windmillid: params[:windmillid]) \n \t\t\trender json: [result.as_json(only: [:status,:power,:gen,:frequency,:rotor,:wind,:pitch])]\n\tend",
"def stories(project, api_key, filter='')\n\treq = Net::HTTP::Get.new(\n \"/services/v3/projects/#{project}/stories?filter=#{filter}\",\n {'X-TrackerToken'=>api_key}\n )\n res = Net::HTTP.start(@pt_uri.host, @pt_uri.port) {|http|\n http.request(req)\n }\n\n return res.body\nend",
"def show\n @pick = @game.picks.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @pick }\n end\n end",
"def get_ticker_tape_info \n yahoo_client = YahooFinance::Client.new\n @ticker_data = yahoo_client.quotes([\"^GSPC\", \"^IXIC\", \"CL=F\", \"GC=F\", \"EURUSD=X\"], [:last_trade_price, :change, :change_in_percent])\n respond_to do |format|\n format.json { render json: @ticker_data, status: :ok }\n format.html { @ticker_data }\n end\n end",
"def index\n @caloric_intakes = CaloricIntake.all\n end",
"def fetch\n response = RestClient.get \"http://pokeapi.co/#{resource_uri}\"\n update(resource_data: JSON.parse(response))\n end",
"def index\n @money = Money.all\n require 'net/http'\n require 'json'\n @url = 'https://api.coinmarketcap.com/v1/ticker/'\n @uri = URI(@url)\n @response = Net::HTTP.get(@uri)\n @lookup_money = JSON.parse(@response)\n end",
"def index\n @punch_clocks = current_user.account.punch_clocks\n end",
"def showListPendingPays\n puts @current_user\n results = HTTParty.get(\"http://192.168.99.101:4055/lists/by_user?user_id=\"+(@current_user[\"id\"]).to_s)\n if results.code == 200\n render json: results.parsed_response, status: 200\n else\n render json: results.parsed_response, status: results.code\n end\n end",
"def index # Essentially the main page of the application proper. This is the discover page.\n #@outlets = Outlet.where(inactive: false).order(:name).paginate(page: params[:page], per_page: 20)\n offset = params[:offset].to_i\n if offset == nil\n offset = 0\n end\n if current_user.trial == true\n puts \"current_user is trial user\"\n fetch_trial_outlets\n else\n fetch_outlets(offset)\n end\n render json: @outlets\n end",
"def account_trade_list(options)\n sleep 0.25 # rate limit weight of 5, 20 requests/second limit\n request :account, :get, 'myTrades', options\n end",
"def historical_trades(options)\n request :verified, :get, :historicalTrades, options\n end",
"def show\n @kickoff = Kickoff.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @punt }\n end\n end",
"def checkin_pings\n response[\"checkinPings\"]\n end",
"def trips\n flight = Flight.where(\"id = ?\", params[:id]).take\n if flight.nil?\n render :json => {errors: \"404\"}, :status => 404\n else\n respond_with( flight.trips )\n end\n end",
"def api_url\n \"https://api.readmill.com/v2/\"\n end",
"def job_results(jobid)\r\n wait_on_status(jobid)\r\n puts \"Retrieving results for job [#{jobid}]\"\r\n uri = URI(\"http://api.idolondemand.com/1/job/result/\" + jobid)\r\n uri.query = URI.encode_www_form(:apikey => $api_key)\r\n res = Net::HTTP.get_response(uri, p_addr = $proxy_host, p_port = $proxy_port)\r\n return JSON.parse(res.body)['actions']\r\nend",
"def available_trends\n get(\"/trends/available.json\")\n end",
"def get_coins\n get(\"/getcoins\")\n end",
"def get_target\n\n\n # get the price for the last trade\n response = HTTParty.get(\"#{@@base_url}/venues/#{@venue}/stocks/#{@stock}/quote\")\n\n last = response.parsed_response[\"last\"].to_i\n\n #set the limit for the first offer as one dollar less than the last price\n limit = last - 100\n\n # Order parameters hash\n order = {\n \"account\" => @account,\n \"venue\" => @venue,\n \"symbol\" => @stock,\n \"price\" => limit,\n \"qty\" => 1,\n \"direction\" => \"buy\",\n \"orderType\" => \"limit\"\n }\n\n #make the first offer to activate the flash message in the UI\n response = HTTParty.post(\"#{@@base_url}/venues/#{@venue}/stocks/#{@stock}/orders\",\n :body => JSON.dump(order),\n :headers => {\"X-Starfighter-Authorization\" => @apikey}\n )\n\n\n # wait untill the order is closed\n id = response.parsed_response[\"id\"]\n\n status = response.parsed_response[\"open\"]\n\n while status\n\n response = HTTParty.get(\"#{@@base_url}/venues/#{@venue}/stocks/#{@stock}/orders/#{id}\",\n :headers => {\"X-Starfighter-Authorization\" => @apikey})\n\n status = response.parsed_response[\"open\"]\n\n\n end\n\n # wait a few seconds for the message in the UI to be generated\n sleep(5)\n\n # catch the treshold from the message in the UI\n response = HTTParty.get(\"https://www.stockfighter.io/gm/instances/#{@instance}\",\n :headers => {\"X-Starfighter-Authorization\" => @apikey})\n\n flash_message = response.parsed_response[\"flash\"][\"info\"]\n\n\n regex = /\\d\\d\\.\\d\\d\\.$/\n\n threshold = regex.match(flash_message).to_s.chop.to_f\n\n unless threshold\n\n puts \"There has been a problem getting the target price\"\n\n nil\n\n end\n\n puts \"Target price has been fetched\"\n\n (threshold * 100).to_i\n\n end",
"def getcount\n\t\tRails.logger.info 'Called VotesController#getcount'\n\t\tupcount = VoteStore.get_up_count(params[:id])\n\t\tdowncount = VoteStore.get_down_count(params[:id])\n\t\tRails.logger.info \"Up: #{upcount}\"\n\t\tRails.logger.info \"Down: #{downcount}\"\n\t\trender json: {count: (upcount-downcount)}\n\tend",
"def index\n @intakes = Intake.search(params[:search])\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @intakes }\n end\n end",
"def test_history_visible\n # check that a visible way is returned properly\n get :history, :id => ways(:visible_way).id\n assert_response :success\n end",
"def get_all_report\n limit = 15\n begin\n if params.has_key? :last_iso_timestamp\n from_time = DateTime.iso8601 params[:last_iso_timestamp]\n\n items = Item.active_items.order('updated_at DESC').where(\"updated_at < ?\",from_time).limit(limit)\n else \n items = Item.active_items.order('updated_at DESC').limit(limit)\n end\n #return\n render json:{success:true, lost_and_found: items.take(limit)} \n rescue Exception => e\n render json:{success:false, message: e.to_s}\n end\n end",
"def execute_request(take, skip = nil)\n\t\tmessage = {\n\t\t\ttoken: Afasgem.get_token,\n\t\t\tconnectorId: @connectorname,\n\t\t\ttake: take\n\t\t}\n\n\t\tmessage[:skip] = skip if skip\n\t\tfilter_string = get_filter_string\n\t\tmessage[:filtersXml] = filter_string if filter_string\n\n\t\tresp = @client.call(:get_data, message: message)\n\t\txml_string = resp.hash[:envelope][:body][:get_data_response][:get_data_result]\n\t\treturn [xml_string, from_xml(xml_string)]\n\tend",
"def index\n places = Place.active\n q = params[:q]\n if q\n # Sanitize the q param and find in 'tags' table, and then all matching places\n # tag = Tag.find_by(name: to_tag(q))\n places = Place.active.joins(:tags).where(tags: { name: to_tag(q) })\n end\n\n if params.key?(:open_now)\n now = Time.now.utc + (params[:tz_offset] || 0).to_i # This creates a UTC timezone that is adjusted to the local clock\n places = places.select { |p| open_now?(p.opening_hours, now, now) || open_now?(p.opening_hours, now, now - 1.day) }\n end\n render json: { 'places': places.map(&:short_data) }\n end",
"def index\n render plain: \"/api/races, offset=[#{params[:offset]}], limit=[#{params[:limit]}]\"\n end",
"def questions\n @response = self.class.get(\"/data/2.5/forecast\", @options)\n end",
"def index\n @holdings = Holding.select(\"*, ((bid - price) * qty) as gain\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @holdings }\n end\n end",
"def get_option_strikes_realtime_with_http_info(symbol, strike, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: OptionsApi.get_option_strikes_realtime ...\"\n end\n # verify the required parameter 'symbol' is set\n if @api_client.config.client_side_validation && symbol.nil?\n fail ArgumentError, \"Missing the required parameter 'symbol' when calling OptionsApi.get_option_strikes_realtime\"\n end\n # verify the required parameter 'strike' is set\n if @api_client.config.client_side_validation && strike.nil?\n fail ArgumentError, \"Missing the required parameter 'strike' when calling OptionsApi.get_option_strikes_realtime\"\n end\n if @api_client.config.client_side_validation && opts[:'stock_price_source'] && !['iex', 'bats_delayed', 'intrinio_mx', 'delayed_sip', 'utp_delayed', 'otc_delayed', 'cta_a_delayed', 'cta_b_delayed', 'nasdaq_basic'].include?(opts[:'stock_price_source'])\n fail ArgumentError, 'invalid value for \"stock_price_source\", must be one of iex, bats_delayed, intrinio_mx, delayed_sip, utp_delayed, otc_delayed, cta_a_delayed, cta_b_delayed, nasdaq_basic'\n end\n if @api_client.config.client_side_validation && opts[:'model'] && !['black_scholes', 'bjerk'].include?(opts[:'model'])\n fail ArgumentError, 'invalid value for \"model\", must be one of black_scholes, bjerk'\n end\n # resource path\n local_var_path = \"/options/strikes/{symbol}/{strike}/realtime\".sub('{' + 'symbol' + '}', symbol.to_s).sub('{' + 'strike' + '}', strike.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'stock_price_source'] = opts[:'stock_price_source'] if !opts[:'stock_price_source'].nil?\n query_params[:'model'] = opts[:'model'] if !opts[:'model'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['ApiKeyAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'ApiResponseOptionsChainRealtime')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: OptionsApi#get_option_strikes_realtime\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def measurements_get(opts = {})\n if Configuration.debugging\n Configuration.logger.debug \"Calling API: MeasurementApi#measurements_get ...\"\n end\n \n # resource path\n path = \"/measurements\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'access_token'] = opts[:'access_token'] if opts[:'access_token']\n query_params[:'user_id'] = opts[:'user_id'] if opts[:'user_id']\n query_params[:'client_id'] = opts[:'client_id'] if opts[:'client_id']\n query_params[:'connector_id'] = opts[:'connector_id'] if opts[:'connector_id']\n query_params[:'variable_id'] = opts[:'variable_id'] if opts[:'variable_id']\n query_params[:'source_id'] = opts[:'source_id'] if opts[:'source_id']\n query_params[:'start_time'] = opts[:'start_time'] if opts[:'start_time']\n query_params[:'value'] = opts[:'value'] if opts[:'value']\n query_params[:'unit_id'] = opts[:'unit_id'] if opts[:'unit_id']\n query_params[:'original_value'] = opts[:'original_value'] if opts[:'original_value']\n query_params[:'original_unit_id'] = opts[:'original_unit_id'] if opts[:'original_unit_id']\n query_params[:'duration'] = opts[:'duration'] if opts[:'duration']\n query_params[:'note'] = opts[:'note'] if opts[:'note']\n query_params[:'latitude'] = opts[:'latitude'] if opts[:'latitude']\n query_params[:'longitude'] = opts[:'longitude'] if opts[:'longitude']\n query_params[:'location'] = opts[:'location'] if opts[:'location']\n query_params[:'created_at'] = opts[:'created_at'] if opts[:'created_at']\n query_params[:'updated_at'] = opts[:'updated_at'] if opts[:'updated_at']\n query_params[:'error'] = opts[:'error'] if opts[:'error']\n query_params[:'limit'] = opts[:'limit'] if opts[:'limit']\n query_params[:'offset'] = opts[:'offset'] if opts[:'offset']\n query_params[:'sort'] = opts[:'sort'] if opts[:'sort']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = ['application/json']\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = ['application/json']\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n\n auth_names = ['quantimodo_oauth2']\n result = @api_client.call_api(:GET, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'inline_response_200_13')\n if Configuration.debugging\n Configuration.logger.debug \"API called: MeasurementApi#measurements_get. Result: #{result.inspect}\"\n end\n return result\n end",
"def fetch_price_points\n\n return error_with_go_to(\n 'tm_b_4',\n 'aux_chain_id_not_found',\n GlobalConstant::GoTo.token_setup\n ) unless @token[:aux_chain_id].present?\n aux_chain_id = @token[:aux_chain_id]\n\n price_points = KitSaasSharedCacheManagement::OstPricePoints.new([aux_chain_id]).fetch\n @api_response_data[:price_points] = price_points[aux_chain_id]\n success\n\n end",
"def index\n @tacks = Tack.all\n\n render json: @tacks\n end",
"def pings\n response[\"pings\"]\n end",
"def index\n @picks = PickName.order(:id).all\n\n respond_to do |format|\n format.html # index.html.haml\n format.json { render json: @picks }\n end\n end",
"def show\n @point_consumption = PointConsumption.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @point_consumption }\n end\n end",
"def get(path, params={})\n params = merge_set_up_params(params)\n JSON.parse(Typhoeus::Request.get(API_URL + path, :params => params).body)[\"response\"]\n end",
"def recordings_list_stored\n get \"recordings/stored\"\n end",
"def cutoffs\n end",
"def index\n @face_offs = FaceOff.all\n end",
"def index\n @ride_requests = RideRequest.all\n end",
"def took\n response['took']\n end",
"def won_offers(options={})\n won_status.offers.all(options)\n end",
"def get_trip_offers(active=true, limit=3)\n dputs __method__.to_s\n if active\n trip_offer_req = setup_http_request($active_trip_offers, @cookie, {:url_arg => [1]})\n obj_ = $active_trip_offers\n page_regex = $active_trip_offers[:url].gsub(\"?\",\"\\\\?\").gsub(\"/\", \"\\\\/\") % \"\"\n page_url = $active_trip_offers[:url]\n else\n trip_offer_req = setup_http_request($inactive_trip_offers, @cookie, {:url_arg => [1]})\n obj_ = $inactive_trip_offers\n page_regex = $inactive_trip_offers[:url].gsub(\"?\",\"\\\\?\").gsub(\"/\", \"\\\\/\") % \"\"\n page_url = $inactive_trip_offers[:url]\n end\n res = @http.request(trip_offer_req)\n trips = {}\n trips = list_trip_offers(CGI.unescapeHTML(res.body.force_encoding(\"utf-8\")))\n pages = res.body.scan(/<a href=\"#{page_regex}(\\d+)/).flatten.uniq\n # in case we got something like 1, 2, 3,4,5,6,7,8,9,21\n pages.map!(&:to_i)\n if not pages.empty?\n if pages.length >= 2\n diff = pages[-2..-1].inject(:-).abs\n if diff > 1\n pages += 1.upto(diff).map{|d| d + pages[-2]}.to_a\n pages.sort!\n pages.slice!(limit..-1)\n end\n end\n pages.map{|p|\n # Using $active_trip_offers for the method, but specify the URL\n trip_offer_req = setup_http_request(obj_, @cookie, {:url => page_url, :url_arg => [p]})\n res = @http.request(trip_offer_req)\n trips = trips.merge(list_trip_offers(res.body))\n }\n end\n trips\n end",
"def train_api(mapid)\n url_safe_mapid = URI.encode(mapid)\n apiKey = \"73b6a68e9e4f450792ba730b84d8c506\"\n apiLink = \"http://lapi.transitchicago.com/api/1.0/ttarrivals.aspx?key=#{apiKey}&mapid=#{url_safe_mapid}\"\n apiResults = open(apiLink).read\n return Hash.from_xml(apiResults)\n end",
"def index\n @used_bikes = UsedBike.all\n\n render json: @used_bikes, each_serializer: Web::V1::UsedBikeSerializer\n end",
"def set_takeout\n @takeout = Takeout.find(params[:id])\n end",
"def set_takeout\n @takeout = Takeout.find(params[:id])\n end"
] | [
"0.6381655",
"0.5682251",
"0.55859435",
"0.54620636",
"0.5439219",
"0.5405428",
"0.53818953",
"0.5358595",
"0.5263549",
"0.5156631",
"0.51341677",
"0.51270944",
"0.5113039",
"0.5068326",
"0.5066294",
"0.5064037",
"0.5054704",
"0.5027866",
"0.50010014",
"0.5000613",
"0.4991291",
"0.49831575",
"0.497515",
"0.49649853",
"0.49628115",
"0.49501172",
"0.4948777",
"0.4946153",
"0.49419877",
"0.4909703",
"0.48959622",
"0.4889256",
"0.4881222",
"0.4879907",
"0.48692787",
"0.48670685",
"0.48643413",
"0.4864076",
"0.48500633",
"0.48456714",
"0.4845407",
"0.4835785",
"0.4834544",
"0.48316136",
"0.4816471",
"0.48113135",
"0.48065493",
"0.4803432",
"0.48005557",
"0.48002058",
"0.47956872",
"0.47905102",
"0.4790374",
"0.4785653",
"0.47849473",
"0.4776824",
"0.47750193",
"0.47643748",
"0.47640806",
"0.4759249",
"0.47563836",
"0.47559077",
"0.4752713",
"0.47474816",
"0.47434813",
"0.47321162",
"0.47283378",
"0.47214654",
"0.47205934",
"0.47174582",
"0.47117594",
"0.4711437",
"0.4706334",
"0.46953452",
"0.46902484",
"0.46858478",
"0.46848598",
"0.46843535",
"0.46839473",
"0.46783718",
"0.46755037",
"0.4671997",
"0.4671269",
"0.46692306",
"0.46651223",
"0.46610078",
"0.46583247",
"0.46579945",
"0.46536067",
"0.4647316",
"0.46447295",
"0.46438053",
"0.46408063",
"0.46381855",
"0.46374828",
"0.46332166",
"0.46296784",
"0.4629123",
"0.46291062",
"0.46291062"
] | 0.6435972 | 0 |
GET /qx/take_offs/1 GET /qx/take_offs/1.json | def show
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n # @qx_take_offs = Qx::TakeOff.all\n \n per_page = params[:per_page] || 100\n @q = Qx::TakeOff.ransack(params[:q])\n @qx_take_offs = @q.result().paginate(:page => params[:page], :per_page => per_page)\n end",
"def set_qx_take_off\n @qx_take_off = Qx::TakeOff.find(params[:id])\n end",
"def client_choose(offset = 10, limit = 20)\n response = Net::HTTP.get(\n URI(\"https://pokeapi.co/api/v2/pokemon/?offset=#{offset}&limit=#{limit}\")\n )\n \n JSON.parse(response)\nend",
"def show\n @time_off_request = TimeOffRequest.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @time_off_request }\n end\n end",
"def show\n @take = Take.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @take }\n end\n end",
"def create\n runway = qx_take_off_params[:runway].split(\"/\")\n runway.each do |item|\n qx_take_off_params[:runway_id] = Qx::Runway.get_runay_id(qx_take_off_params[:airport_id], item)\n @qx_take_off = Qx::TakeOff.new(qx_take_off_params)\n end\n\n\n p runway\n\n respond_to do |format|\n if @qx_take_off.save\n format.html { redirect_to @qx_take_off, notice: 'Take off was successfully created.' }\n format.json { render :show, status: :created, location: @qx_take_off }\n else\n format.html { render :new }\n format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @qx_take_off.update(qx_take_off_params)\n format.html { redirect_to @qx_take_off, notice: 'Take off was successfully updated.' }\n format.json { render :show, status: :ok, location: @qx_take_off }\n else\n format.html { render :edit }\n format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def qx_take_off_params\n params.require(:qx_take_off).permit(:airport_id, :runway, :aircraft_type, :hirl_rcls, :hirl, :hirl_rcls_stop, :lights)\n end",
"def trip_purposes \n label = request_label(:purposes)\n \n @http_request_bundler.add(\n label, \n @url + \"/trip_purposes\", \n :get,\n head: headers,\n query: { provider_id: provider_id }\n ).response!(label)\n end",
"def get(incoming={})\n opts = HttpClient::Helper.symbolize_keys(incoming)\n query = {\n :guid => HttpClient::Preconditions.assert_class_or_nil('guid', HttpClient::Helper.to_uuid(opts.delete(:guid)), String),\n :user_guid => HttpClient::Preconditions.assert_class_or_nil('user_guid', HttpClient::Helper.to_uuid(opts.delete(:user_guid)), String),\n :service_key => HttpClient::Preconditions.assert_class_or_nil('service_key', opts.delete(:service_key), String),\n :limit => HttpClient::Preconditions.assert_class_or_nil('limit', opts.delete(:limit), Integer),\n :offset => HttpClient::Preconditions.assert_class_or_nil('offset', opts.delete(:offset), Integer)\n }.delete_if { |k, v| v.nil? }\n @client.request(\"/watches\").with_query(query).get.map { |hash| Apidoc::Models::Watch.new(hash) }\n end",
"def pokemon_api_caller\nresponse = RestClient.get \"https://pokeapi.co/api/v2/pokemon/?offset=0&limit=807\"\nresponse_JSON = JSON.parse(response)\nresponse_JSON[\"results\"]\nend",
"def index\n respond_to do |format|\n format.html\n format.json {\n\n render :json => TimeOff.joins('LEFT OUTER JOIN request_types ON time_offs.request_type_id = request_types.id')\n .joins('INNER JOIN users ON time_offs.user_id = users.id')\n .select(\n 'time_offs.id,\n time_offs.request_start_date,\n time_offs.request_end_date,\n time_offs.status,\n time_offs.comments,\n users.name as users_name,\n request_types.name as request_type_name') }\n end\n end",
"def index\n set_user\n @time_offs = TimeOff.all\n end",
"def trades\n Client.current.get(\"#{resource_url}/trades\")\n end",
"def fetch\n response = RestClient.get \"http://pokeapi.co/#{resource_uri}\"\n update(resource_data: JSON.parse(response))\n end",
"def show\n @pick = @game.picks.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @pick }\n end\n end",
"def make_request_get_response_trend_availible\n @path_trend_availible = '/1.1/trends/available.json'\n @address_trend_availible = URI(\"#{@baseurl}#{@path_trend_availible}\")\n # Set up HTTP. Need ssL to make the connection\n @request_trend_availible = Net::HTTP::Get.new @address_trend_availible.request_uri\n @http = Net::HTTP.new @address_trend_availible.host, @address_trend_availible.port\n @http.use_ssl = true\n @http.verify_mode = OpenSSL::SSL::VERIFY_PEER\n # Issue the request.\n @request_trend_availible.oauth! @http, @consumer_key_country, @access_token_country\n @http.start\n @response_trend_availible = @http.request @request_trend_availible\n @response_trend_availible\n end",
"def show\n @point_consumption = PointConsumption.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @point_consumption }\n end\n end",
"def index\n # @checkpoints = Checkpoint.all\n @route = Route.find(params[:route_id])\n @checkpoints = @route.checkpoints\n # render json: {\n # checkpoints: @checkpoints.to_a\n # }\n end",
"def get\n\t\t\t result = Status.find_by(windmillid: params[:windmillid]) \n \t\t\trender json: [result.as_json(only: [:status,:power,:gen,:frequency,:rotor,:wind,:pitch])]\n\tend",
"def hours\n render json: Pings::Selector.new.hours(params)\n end",
"def get\n appid = ENV['TRIMET_APP_ID']\n response = Unirest.get( \"http://developer.trimet.org/ws/v2/vehicles?appid=#{appid}\" )\n response.body\nend",
"def get_api_results(_url)\n JSON.parse File.read('spec/inspector/stubbed_example.json')\n end",
"def retrieve_rates(date)\n path = \"http://openexchangerates.org/api/historical/#{date.to_s}.json?app_id=#{$app_id}\"\n response = Net::HTTP.get_response(URI.parse path)\n # TODO: error handling\n response.body\nend",
"def show\n @kickoff = Kickoff.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @punt }\n end\n end",
"def call_yts_api(count)\n\t \t\traw_data = RestClient.get BASE_URL, \n\t\t\t{ :params => \n\t\t\t\t{ :quality => '720p',\n\t\t\t\t :set => count,\n\t\t\t\t :limit => 50,\n\t\t\t\t :rating => 1,\n\t\t\t\t :sort => \"date\",\n\t\t\t\t :order => \"desc\" \n\t\t\t\t} \n\t\t\t}\n\t\t\tJSON.parse(raw_data)\n\t \tend",
"def get(path, params={})\n params = merge_set_up_params(params)\n JSON.parse(Typhoeus::Request.get(API_URL + path, :params => params).body)[\"response\"]\n end",
"def get(id)\n _get(\"/quick-scan/#{id}\") { |json| json }\n end",
"def test_that_you_may_filter_to_single_track\n getting '/v2/exercises?tracks=fruit'\n\n returns_tracks %w( fruit )\n end",
"def index\n @intakes = Intake.all\n end",
"def trips\n get '/gtfs/trips'\n end",
"def load_poke url\n output = JSON.parse(RestClient.get(url))\n if Poke.find_by(id: output['id'])\n Poke.update(output['id'],\n name: output['name'],\n weight: output['weight'],\n order: output['order'])\n else\n Poke.create(id: output['id'],\n name: output['name'],\n weight: output['weight'],\n order: output['order'])\n end\n print 'p' + output['id'].to_s + ' ' if VERBOSE==1\nend",
"def destroy\n @qx_take_off.destroy\n respond_to do |format|\n format.html { redirect_to qx_take_offs_url, notice: 'Take off was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def trips\n flight = Flight.where(\"id = ?\", params[:id]).take\n if flight.nil?\n render :json => {errors: \"404\"}, :status => 404\n else\n respond_with( flight.trips )\n end\n end",
"def fetchControlPoints(url, mapID)\n url = URI(url.to_s+'maps/'+mapID.to_s+'/control_points.json')\n response = Net::HTTP.get_response(url)\n data = response.body\n JSON.parse(data)\n end",
"def get_skus()\n\tputs \"Getting skus\"\n\tresponse = request_get(\"/api/sku\")\n\tputs response.body\nend",
"def show\n @tick_track = TickTrack.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @tick_track }\n end\n end",
"def prepare_http_requests\n {\n label: :uber_prices,\n url: @uber_api_service.estimates_price_url([@trip.destination.lat, @trip.destination.lng], [@trip.origin.lat, @trip.origin.lng]),\n action: :get,\n options: {\n head: @uber_api_service.headers \n }\n }\n end",
"def api_url\n \"https://api.readmill.com/v2/\"\n end",
"def serv_json\n \"http://api.dribbble.com/shots/popular?page=1\"\n end",
"def index\n @money = Money.all\n require 'net/http'\n require 'json'\n @url = 'https://api.coinmarketcap.com/v1/ticker/'\n @uri = URI(@url)\n @response = Net::HTTP.get(@uri)\n @lookup_money = JSON.parse(@response)\n end",
"def trips\n @trip_requests = current_user.trip_requests.trips.paginate(page:params[:page], per_page:20)\n json_response(@trip_requests)\n end",
"def index\n if(params[:mode] != nil && params[:mode] == \"run\")\n render :json => Playoff.exists?(:running => true)\n elsif(params[:running] != nil && params[:running])\n @playoffs = Playoff.where(running: true)\n else\n @playoffs = Playoff.all\n end\n\n end",
"def index\n @hold_requests = HoldRequest.all\n end",
"def interesting(options = {})\n response = Typhoeus::Request.get(\"#{DARKSKY_API_URL}/interesting/#{@api_key}\", DEFAULT_OPTIONS.dup.merge(options))\n JSON.parse(response.body) if response.code == 200 \n end",
"def get_prefectures\n server_response = handle_timeouts do\n get '/1/neighborhoods.json?locale=en'\n end\n server_response['response']\n end",
"def show\n if params[:id] == 'latest'\n @pitch = Pitch.last\n else\n @pitch = Pitch.find(params[:id])\n end\n if params[:view] == 'invest'\n api_success @pitch.as_json(details: true)\n else\n api_success @pitch.as_json\n end\n end",
"def history(params)\n Client.current.get(\"#{resource_url}/candles\", params)\n end",
"def get_ticker_tape_info \n yahoo_client = YahooFinance::Client.new\n @ticker_data = yahoo_client.quotes([\"^GSPC\", \"^IXIC\", \"CL=F\", \"GC=F\", \"EURUSD=X\"], [:last_trade_price, :change, :change_in_percent])\n respond_to do |format|\n format.json { render json: @ticker_data, status: :ok }\n format.html { @ticker_data }\n end\n end",
"def stories(project, api_key, filter='')\n\treq = Net::HTTP::Get.new(\n \"/services/v3/projects/#{project}/stories?filter=#{filter}\",\n {'X-TrackerToken'=>api_key}\n )\n res = Net::HTTP.start(@pt_uri.host, @pt_uri.port) {|http|\n http.request(req)\n }\n\n return res.body\nend",
"def show\n @touchpoint = Touchpoint.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @touchpoint }\n end\n end",
"def get_recordings()\n @client.make_request(:get, @client.concat_user_path(\"#{CALL_PATH}/#{id}/recordings\"))[0]\n end",
"def get_reading\n\t\turi = URI.parse('http://localhost:5000/thermometers.json')\n\t\tthermo_response = Net::HTTP.get_response(uri)\n\t\tcheck = thermo_response.body\n\t\tj = JSON.parse(check)\n\tend",
"def prepare_http_requests\n {\n label: :lyft_prices,\n url: @lyft_api_service.price_url([@trip.destination.lat, @trip.destination.lng], [@trip.origin.lat, @trip.origin.lng]),\n action: :get,\n options: {\n head: @lyft_api_service.headers \n }\n }\n end",
"def show\n @time_point = TimePoint.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @time_point }\n end\n end",
"def skjar1\n Apis.client.get('/tv/skjar1')\n end",
"def index\n if params[:workpoint_id]\n @outs = Workpoint.find(params[:workpoint_id]).outs\n else\n @outs = Out.all\n end\n respond_to do |format|\n format.json {\n render :json => @outs, :layout => false\n }\n end\n end",
"def show\n @watcher = Watcher.includes(:tweets).friendly.find(params[:id])\n @statuses = Status.all\n respond_to do |format|\n format.html { render :show, offset: params[:offset]}\n format.json { render json: @watcher.tweets }\n end\n end",
"def single_predicted_usage\r\n meter = get_meters[3]\r\n date_ranges = get_date_ranges\r\n dtps = Meter.get_daily_time_periods [meter]\r\n\r\n# usage = meter.predicted_usage_by_meter(date_ranges, dtps)\r\n# usage = meter.predicted_usage_by_time(date_ranges, dtps)\r\n# usage = meter.detailed_predicted_usage_by_meter(date_ranges, dtps)\r\n usage = meter.detailed_predicted_usage_by_time(date_ranges, dtps)\r\n\r\n render :json => usage\r\n\r\n# redirect_to action: 'index'\r\n end",
"def scrobble( params )\n LastFM.requires_authentication\n # Tracks are passed to the service using array notation for each of the above params\n array_params = {}\n params.each do |hkey, hval|\n hval = hval.to_i if hval.is_a?(Time)\n Array(hval).each_with_index do |aval, index|\n array_params[\"#{hkey}[#{index}]\"] = aval\n end\n end\n LastFM.post( \"track.scrobble\", array_params )\n end",
"def takeover_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: Class1InstanceApi.takeover ...'\n end\n # resource path\n local_var_path = '/takeover'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'InlineResponse2001'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['instanceId', 'token']\n\n new_options = opts.merge(\n :operation => :\"Class1InstanceApi.takeover\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: Class1InstanceApi#takeover\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def state_events(state)\n api_return = RestClient.get('https://app.ticketmaster.com/discovery/v2/events.json?stateCode=' + state + '&apikey=' + $ticket_master_api_key)\n JSON.parse(api_return)\nend",
"def show\n @click_thru = ClickThru.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @click_thru }\n end\n end",
"def get_json(state, city)\n HTTParty.get(\"http://api.wunderground.com/api/b0938627f87459c4/conditions/q/#{state}/#{city}.json\").parsed_response\nend",
"def index\n @tips_tricks = @tips_tricks.published.recent.page(params[:page]).per(10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @tips_tricks }\n end\n end",
"def show\n @pickup = Pickup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @pickup }\n end\n end",
"def status\n power_of_attorney = ClaimsApi::PowerOfAttorney.find_using_identifier_and_source(id: params[:id],\n source_name: source_name)\n raise ::Common::Exceptions::ResourceNotFound.new(detail: 'Resource not found') unless power_of_attorney\n\n render json: power_of_attorney, serializer: ClaimsApi::PowerOfAttorneySerializer\n end",
"def show\n @self_drive_price = SelfDrivePrice.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @self_drive_price }\n end\n end",
"def show\n @one_time_stop = OneTimeStop.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @one_time_stop }\n end\n end",
"def get(path, params={})\n params = merge_set_up_params(params)\n @token = \"b3688c52-9235-45ca-b01f-c5b2b83a4f4f\"\n @result = Typhoeus::Request.get(API_URL + path, :params => params,\n :headers => {\"Authorization\" => \"Basic#{@token}\"})\n puts @result.body\n # check if the url looks correct in the log\n puts @result.effective_url\n # parse the result to json\n return JSON.parse(@result.body)\n end",
"def measurements_get(opts = {})\n if Configuration.debugging\n Configuration.logger.debug \"Calling API: MeasurementApi#measurements_get ...\"\n end\n \n # resource path\n path = \"/measurements\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'access_token'] = opts[:'access_token'] if opts[:'access_token']\n query_params[:'user_id'] = opts[:'user_id'] if opts[:'user_id']\n query_params[:'client_id'] = opts[:'client_id'] if opts[:'client_id']\n query_params[:'connector_id'] = opts[:'connector_id'] if opts[:'connector_id']\n query_params[:'variable_id'] = opts[:'variable_id'] if opts[:'variable_id']\n query_params[:'source_id'] = opts[:'source_id'] if opts[:'source_id']\n query_params[:'start_time'] = opts[:'start_time'] if opts[:'start_time']\n query_params[:'value'] = opts[:'value'] if opts[:'value']\n query_params[:'unit_id'] = opts[:'unit_id'] if opts[:'unit_id']\n query_params[:'original_value'] = opts[:'original_value'] if opts[:'original_value']\n query_params[:'original_unit_id'] = opts[:'original_unit_id'] if opts[:'original_unit_id']\n query_params[:'duration'] = opts[:'duration'] if opts[:'duration']\n query_params[:'note'] = opts[:'note'] if opts[:'note']\n query_params[:'latitude'] = opts[:'latitude'] if opts[:'latitude']\n query_params[:'longitude'] = opts[:'longitude'] if opts[:'longitude']\n query_params[:'location'] = opts[:'location'] if opts[:'location']\n query_params[:'created_at'] = opts[:'created_at'] if opts[:'created_at']\n query_params[:'updated_at'] = opts[:'updated_at'] if opts[:'updated_at']\n query_params[:'error'] = opts[:'error'] if opts[:'error']\n query_params[:'limit'] = opts[:'limit'] if opts[:'limit']\n query_params[:'offset'] = opts[:'offset'] if opts[:'offset']\n query_params[:'sort'] = opts[:'sort'] if opts[:'sort']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = ['application/json']\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = ['application/json']\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n\n auth_names = ['quantimodo_oauth2']\n result = @api_client.call_api(:GET, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'inline_response_200_13')\n if Configuration.debugging\n Configuration.logger.debug \"API called: MeasurementApi#measurements_get. Result: #{result.inspect}\"\n end\n return result\n end",
"def booking_api\n :ride_pilot\n end",
"def show\n @beattape = Beattape.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @beattape }\n end\n end",
"def show\n skippable_fields = [:map_fns, :record_ids, :measure_attributes]\n @measure = Measure.by_user(current_user).without(*skippable_fields).find(params[:id])\n if stale? last_modified: @measure.updated_at.try(:utc), etag: @measure.cache_key\n @measure_json = MultiJson.encode(@measure.as_json(except: skippable_fields))\n respond_with @measure do |format|\n format.json { render json: @measure_json }\n end\n end\n end",
"def index\n @picks = PickName.order(:id).all\n\n respond_to do |format|\n format.html # index.html.haml\n format.json { render json: @picks }\n end\n end",
"def show\n @predicts = Predict.where(params[:matchpick_id])\n @matchpick = Matchpick.find(params[:id])\n \n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @predict }\n end\n end",
"def show\n @time_track = TimeTrack.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @time_track }\n end\n end",
"def show\n @wait_time = WaitTime.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @wait_time }\n end\n end",
"def set_api\n @url = 'https://api.coinmarketcap.com/v1/ticker/'\n @uri = URI(@url)\n @response = Net::HTTP.get(@uri)\n # converts response to a Ruby hash \n @lookup_crypto = JSON.parse(@response)\n @gain_loss = 0\n end",
"def\n\n# *********VENUE GET METHODS*********\n def get_till_receipts\n\n end",
"def index\n @drop_offs = DropOff.all\n end",
"def index\n @caloric_intakes = CaloricIntake.all\n end",
"def index\n response = RestClient.get 'http://api.bitvalor.com/v1/order_book.json'\n data = JSON.parse(response.body)[\"bids\"]\n @fox = data.select {|element| element[0] == \"FOX\"}\n @b2u = data.select {|element| element[0] == \"B2U\"}\n @mbt = data.select {|element| element[0] == \"MBT\"}\n end",
"def show\n @loud_check = LoudCheck.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @loud_check }\n end\n end",
"def show\n @my_time_trial = MyTimeTrial.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @my_time_trial }\n end\n end",
"def call_poke_url(url)\n JSON.parse(\n HTTP\n .get(URI(url))\n )\nend",
"def index\n skickers = Skicker.order(power: :asc)\n render json: { status: 'SUCCESS', message: 'Loaded skickers', value: skickers }\n end",
"def get_option_strikes_realtime_with_http_info(symbol, strike, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: OptionsApi.get_option_strikes_realtime ...\"\n end\n # verify the required parameter 'symbol' is set\n if @api_client.config.client_side_validation && symbol.nil?\n fail ArgumentError, \"Missing the required parameter 'symbol' when calling OptionsApi.get_option_strikes_realtime\"\n end\n # verify the required parameter 'strike' is set\n if @api_client.config.client_side_validation && strike.nil?\n fail ArgumentError, \"Missing the required parameter 'strike' when calling OptionsApi.get_option_strikes_realtime\"\n end\n if @api_client.config.client_side_validation && opts[:'stock_price_source'] && !['iex', 'bats_delayed', 'intrinio_mx', 'delayed_sip', 'utp_delayed', 'otc_delayed', 'cta_a_delayed', 'cta_b_delayed', 'nasdaq_basic'].include?(opts[:'stock_price_source'])\n fail ArgumentError, 'invalid value for \"stock_price_source\", must be one of iex, bats_delayed, intrinio_mx, delayed_sip, utp_delayed, otc_delayed, cta_a_delayed, cta_b_delayed, nasdaq_basic'\n end\n if @api_client.config.client_side_validation && opts[:'model'] && !['black_scholes', 'bjerk'].include?(opts[:'model'])\n fail ArgumentError, 'invalid value for \"model\", must be one of black_scholes, bjerk'\n end\n # resource path\n local_var_path = \"/options/strikes/{symbol}/{strike}/realtime\".sub('{' + 'symbol' + '}', symbol.to_s).sub('{' + 'strike' + '}', strike.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'stock_price_source'] = opts[:'stock_price_source'] if !opts[:'stock_price_source'].nil?\n query_params[:'model'] = opts[:'model'] if !opts[:'model'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['ApiKeyAuth']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'ApiResponseOptionsChainRealtime')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: OptionsApi#get_option_strikes_realtime\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def index\n @trips = Trip.desc.all\n @latest_trip = @trips.first\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @trips }\n end\n end",
"def show\n @trail = Trail.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @trail }\n end\n end",
"def show\n @trail = Trail.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @trail }\n end\n end",
"def time_trackings\n Easybill::Api::TimeTrackings\n end",
"def get_events_from_api(name)\n #make the web request\n name1 = name\n performer_link = \"https://app.ticketmaster.com/discovery/v2/events.json?keyword=#{name1}&countrycode=US&apikey=ShI4Sd340EJ32f1k6rUgkYPocLSO2qTq\"\n response_string = RestClient.get(performer_link)\n response_hash = JSON.parse(response_string)\nend",
"def show\n @reloud_check = ReloudCheck.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @reloud_check }\n end\n end",
"def index\n render plain: \"/api/races, offset=[#{params[:offset]}], limit=[#{params[:limit]}]\"\n end",
"def index\n @takeouts = Takeout.all\n end",
"def index\n @tacks = Tack.all\n\n render json: @tacks\n end",
"def fetchStopPositions\n\t\tfetchUri(\"http://api.wmata.com/Bus.svc/json/JStops?&api_key=#{@@apiKey}\")\n\tend",
"def get_api(origin, destination, departure)\n origin = get_place(origin)\n destination = get_place(destination)\n if origin == nil || destination == nil\n return nil\n end\n url = URI(\"https://rapidapi.p.rapidapi.com/apiservices/browseroutes/v1.0/US/USD/en-US/#{origin}/#{destination}/#{departure}?inboundpartialdate=anytime\")\n\n response_hash = access_api(url)\nend",
"def get_recordings(options = {})\n prepare\n @api.get_recordings(options)\n end",
"def index\n @trips = Trip.all\n\n render json: @trips\n end"
] | [
"0.61263233",
"0.61080295",
"0.57958066",
"0.5583166",
"0.55722374",
"0.5480653",
"0.5380247",
"0.53569096",
"0.52966285",
"0.5242218",
"0.51876336",
"0.51696175",
"0.5145002",
"0.51378965",
"0.51368654",
"0.5098539",
"0.5096234",
"0.5094366",
"0.5087687",
"0.50844526",
"0.50755006",
"0.50661314",
"0.50594836",
"0.5055887",
"0.50509936",
"0.504895",
"0.5036847",
"0.50336945",
"0.50304276",
"0.5029189",
"0.5027675",
"0.50259256",
"0.50192213",
"0.50143296",
"0.5014163",
"0.50129455",
"0.50060993",
"0.5004211",
"0.49890396",
"0.49835348",
"0.49728635",
"0.49712732",
"0.49700958",
"0.496883",
"0.49683696",
"0.496621",
"0.4953717",
"0.49512628",
"0.49402142",
"0.49318546",
"0.49318427",
"0.49299693",
"0.4923839",
"0.49119744",
"0.48836905",
"0.48797092",
"0.48743337",
"0.4872852",
"0.4871857",
"0.4870095",
"0.48670802",
"0.48585966",
"0.48534402",
"0.4853067",
"0.4850007",
"0.48427302",
"0.48426786",
"0.48402128",
"0.48364842",
"0.48357227",
"0.48345217",
"0.48309714",
"0.4822008",
"0.48178175",
"0.4816167",
"0.48144734",
"0.48115972",
"0.48101977",
"0.48086843",
"0.48070154",
"0.48066494",
"0.48058468",
"0.48001167",
"0.4796738",
"0.47912553",
"0.47875014",
"0.47859654",
"0.47842446",
"0.47827902",
"0.47819933",
"0.47819933",
"0.47815076",
"0.47773033",
"0.4775328",
"0.4769397",
"0.47687292",
"0.4768564",
"0.47677913",
"0.47649348",
"0.47618443",
"0.47568917"
] | 0.0 | -1 |
POST /qx/take_offs POST /qx/take_offs.json | def create
    # Split the runway designator (e.g. "09L/27R") and resolve each part to a runway id;
    # only the take-off built for the last runway in the list is saved below.
    runways = qx_take_off_params[:runway].split("/")
    runways.each do |item|
      runway_id = Qx::Runway.get_runay_id(qx_take_off_params[:airport_id], item)
      @qx_take_off = Qx::TakeOff.new(qx_take_off_params.merge(runway_id: runway_id))
    end
respond_to do |format|
if @qx_take_off.save
format.html { redirect_to @qx_take_off, notice: 'Take off was successfully created.' }
format.json { render :show, status: :created, location: @qx_take_off }
else
format.html { render :new }
format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_qx_take_off\n @qx_take_off = Qx::TakeOff.find(params[:id])\n end",
"def qx_take_off_params\n params.require(:qx_take_off).permit(:airport_id, :runway, :aircraft_type, :hirl_rcls, :hirl, :hirl_rcls_stop, :lights)\n end",
"def update\n respond_to do |format|\n if @qx_take_off.update(qx_take_off_params)\n format.html { redirect_to @qx_take_off, notice: 'Take off was successfully updated.' }\n format.json { render :show, status: :ok, location: @qx_take_off }\n else\n format.html { render :edit }\n format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_wings_and_take_off\nend",
"def index\n # @qx_take_offs = Qx::TakeOff.all\n \n per_page = params[:per_page] || 100\n @q = Qx::TakeOff.ransack(params[:q])\n @qx_take_offs = @q.result().paginate(:page => params[:page], :per_page => per_page)\n end",
"def destroy\n @qx_take_off.destroy\n respond_to do |format|\n format.html { redirect_to qx_take_offs_url, notice: 'Take off was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def create\n set_user\n @time_off = TimeOff.new(time_off_params)\n\n respond_to do |format|\n if @time_off.save\n format.html { redirect_to user_time_off_path(@user, @time_off), notice: 'Time off was successfully created.' }\n format.json { render action: 'show', status: :created, location: @time_off }\n else\n format.html { render action: 'new' }\n format.json { render json: @time_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @test_ride = TestRide.new(test_ride_params)\n @control = BookingTimeControl.book_time_control_method(params)\n \n if @control == true\n if @test_ride.save\n render json: @test_ride, status: :created\n # Create Notifications\n @test_ride.delay.test_ride_booking_notification(I18n.t('Notification.test_ride_booking'), I18n.t('Email.test_ride_booking_dealer'), I18n.t('Email.test_ride_booking_user'), params)\n else\n render json: @test_ride.errors, status: :unprocessable_entity\n end\n else\n render json: @control\n end\n end",
"def drop_off_params\n params.require(:drop_off).permit(:name, :start_time, :end_time)\n end",
"def create\n @time_off_instance = TimeOffInstance.new(time_off_instance_params)\n #@time_off_instance.employee.hours_left -= @time_off_instance.hours_used\n respond_to do |format|\n if @time_off_instance.save\n self.use_pto_hours\n #print \"HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE HERE\"\n format.html { redirect_to @time_off_instance, notice: 'Time off instance was successfully created.' }\n format.json { render :show, status: :created, location: @time_off_instance }\n else\n format.html { render :new }\n format.json { render json: @time_off_instance.errors, status: :unprocessable_entity }\n end\n end\n end",
"def scrobble( params )\n LastFM.requires_authentication\n # Tracks are passed to the service using array notation for each of the above params\n array_params = {}\n params.each do |hkey, hval|\n hval = hval.to_i if hval.is_a?(Time)\n Array(hval).each_with_index do |aval, index|\n array_params[\"#{hkey}[#{index}]\"] = aval\n end\n end\n LastFM.post( \"track.scrobble\", array_params )\n end",
"def create\n @takeout = Takeout.new(takeout_params)\n\n respond_to do |format|\n if @takeout.save\n format.html { redirect_to @takeout, notice: 'Takeout was successfully created.' }\n format.json { render action: 'show', status: :created, location: @takeout }\n else\n format.html { render action: 'new' }\n format.json { render json: @takeout.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @take = Take.new(params[:take])\n\n respond_to do |format|\n if @take.save\n format.html { redirect_to @take, notice: 'Take was successfully created.' }\n format.json { render json: @take, status: :created, location: @take }\n else\n format.html { render action: \"new\" }\n format.json { render json: @take.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @kickoff = Kickoff.new(params[:kickoff])\n\n respond_to do |format|\n if @kickoff.save\n format.html { redirect_to @kickoff, notice: 'Kickoff was successfully created.' }\n format.json { render json: @kickoff, status: :created, location: @kickoff }\n else\n format.html { render action: \"new\" }\n format.json { render json: @kickoff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n @stocktake = Stocktake.new(stocktake_params)\n \n #在庫マスターのIDをセットする\n\tupdate_flag = 0\n set_inventories_id\n\t\n respond_to do |format|\n if @stocktake.save\n format.html { redirect_to @stocktake, notice: 'Stocktake was successfully created.' }\n format.json { render :show, status: :created, location: @stocktake }\n else\n format.html { render :new }\n format.json { render json: @stocktake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def request(state)\n case state\n\n when :start\n puts 'got to request, case \"start\"'\n jdata = JSON.generate({sprinkle_agent_id: @sprinkle_agent_id, state: 1, key: @key})\n puts \"jdata --> #{jdata}\"\n RestClient.put \"http://#{@host_with_port}/sprinkles/1\", jdata, {:content_type => :json}\n\n when :stop\n jdata = JSON.generate({sprinkle_id: @sprinkle_id, state: 'stop', key: @key})\n puts \"jdata --> #{jdata}\"\n # RestClient.put \"http://#{@host_with_port}/sprinkle_agents/1\", jdata, {:content_type => :json}\n end\n end",
"def create_ride_info \n @color = params[:car_color] \n @model = params[:car_model] \n @number = params[:car_number] \n @seats = params[:seats] \n @smoking = params[:smoking] \n @air_conditioner = params[:air_conditioner]\n @trunk = params[:trunk] \n @name = params[:name] \n @girls = params[:girls]\n @gentlemen = params[:gentlemen] \n @@request.car_color = @color \n @@request.car_model= @model \n @@request.car_number= @number \n @@request.seats= @seats \n @@request.smoking= @smoking \n @@request.air_conditioner= @air_conditioner\n @@request.trunk= @trunk \n @@request.name= @name \n @@request.girls_only = @girls\n @@request.gentlemen_only = @gentlemen \n @@request.save \n @@checkpoint.each do |x| \n c = Checkpoint.new \n c.place_id = x.id \n c.request_id = @@request.id \n c.save \n end \n redirect_to root_path \n end",
"def create\n @hold_request = HoldRequest.new(hold_request_params)\n\n respond_to do |format|\n if @hold_request.save\n format.html { redirect_to @hold_request, notice: 'Hold request was successfully created.' }\n format.json { render :show, status: :created, location: @hold_request }\n else\n format.html { render :new }\n format.json { render json: @hold_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @hold_request = HoldRequest.new(hold_request_params)\n\n respond_to do |format|\n if @hold_request.save\n format.html { redirect_to @hold_request, notice: 'Hold request was successfully created.' }\n format.json { render :show, status: :created, location: @hold_request }\n else\n format.html { render :new }\n format.json { render json: @hold_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_takeout\n @takeout = Takeout.find(params[:id])\n end",
"def set_takeout\n @takeout = Takeout.find(params[:id])\n end",
"def time_off_params\n params.require(:time_off).permit(:leave_type, :user_id, :date, :number_of_hours, :from, :to, :reason, :type, :status)\n end",
"def create\n @ride = Ride.new(ride_params)\n @ride.assembly_time = params[:ride][:assembly_time]\n @ride.destination_time = params[:ride][:destination_time]\n @ride.check_points = params[:ride][:check_points]\n if @ride.save\n @ride.delay.call_notification(I18n.t('Notification.ride_created'), I18n.t('Email.ride_created'))\n render json: @ride, status: :created\n else\n render json: @ride.errors, status: :unprocessable_entity\n end\n end",
"def create\n @measure = Measure.new(measure_params)\n logger.debug \"New Measure: #{@measure.attributes.inspect}\"\n respond_to do |format|\n if @measure.save\n format.html { redirect_to @measure, notice: 'Measure was successfully created.' }\n format.json { render :show, status: :created, location: @measure }\n MeasureCleanupJob.set(wait: 36.hours).perform_later @measure\n else\n format.html { render :new }\n format.json { render json: @measure.errors, status: :unprocessable_entity }\n end\n end\n @@pumpStatus = \"|\" + measure_params[:pump_status].to_s + \"|\"\n logger.debug \"@@pumpStatus has been set to: #{@@pumpStatus}\"\n end",
"def create\n @outcome_timepoint = OutcomeTimepoint.new(params[:outcome_timepoint])\n\t@outcome_timepoint.save\n end",
"def checked_out_params\n params.require(:checked_out).permit(:user_id, :bike_id, :checkout_time, :checkin_time, :fixed, :problem)\n end",
"def strokes_expected_shot_params\n params[:strokes_expected_shot]\n end",
"def create\n @playoff = Playoff.new(playoff_params)\n\n respond_to do |format|\n if @playoff.save\n format.html { redirect_to @playoff, notice: 'Playoff was successfully created.' }\n format.json { render :show, status: :created, location: @playoff }\n else\n format.html { render :new }\n format.json { render json: @playoff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def test_post_sample_traces\n header 'Content-Type', 'application/json'\n\n (0..4).each do |i|\n data = File.read \"sample-traces/#{i}.json\"\n post('/traces', data, 'CONTENT_TYPE': 'application/json')\n assert last_response.ok?\n end\n end",
"def add(params)\n headers = {\n 'Cookie' => @context[:koha_rest_api_cookie],\n 'Content-Type' => 'application/json'\n }\n\n http = Net::HTTP.new(\"xkoha\", 8081)\n uri = URI(intranet(:koha_rest_api) + \"holds\")\n res = http.post(uri, params.to_json, headers)\n expect(res.code).to eq(\"201\"), \"got unexpected #{res.code} when adding hold.\\nResponse body: #{res.body}\"\n res.body\n end",
"def ticket_params\n params\n .require(:ticket)\n .permit(\n :dreport_id,\n :number,\n :client,\n :starts_at,\n :ends_at,\n :status,\n :kind,\n :observation,\n :job_1, :job_2, :job_3, :job_4, :job_5, :job_6,\n :mat_1, :mat_2, :mat_3, :mat_4, :mat_5, :mat_6,\n :nt, :sma\n )\n end",
"def takeout_params\n params.require(:takeout).permit(:number)\n end",
"def set_taking\n @taking = Taking.find(params[:id])\n end",
"def create\n \n #timestamp={{FellAsleepAt}}&total_sleep={{TotalTimeSleptInSeconds}}&deep={{TimeInDeepSleepSeconds}}&light={{TimeInLightSleepSeconds}}&awake={{TimeAwakeSeconds}}\n \n json_hash = Hash.new\n \n description = params[:description]\n \n timestamp = params[:timestamp]\n total_sleep_seconds = params[:total_sleep]\n deep_sleep_seconds = params[:deep]\n light_sleep_seconds = params[:light]\n awake_seconds = params[:awake]\n \n if timestamp.nil? || total_sleep_seconds.nil?\n \n puts 'timestamp is nil or total_sleep_seconds is nil :('\n \n else\n \n total_sleep = total_sleep_seconds / 60.0\n deep = deep_sleep_seconds / 60.0\n light = light_sleep_seconds / 60.0\n awake = awake_seconds / 60.0\n \n post_to_twitter = false\n post_to_facebook = false\n \n # FellAsleepAt is formatted: August 23, 2013 at 11:01PM\n # Convert to Runkeeper's preferred format: Sat, 1 Jan 2011 00:00:00\n timestamp_datetime = DateTime.parse(timestamp)\n formatted_timestamp = timestamp_datetime.strftime(\"%a, %d %b %Y %H:%M:%S\")\n \n json_hash['timestamp'] = formatted_timestamp\n json_hash['total_sleep'] = deep\n json_hash['deep'] = deep\n json_hash['light'] = light\n json_hash['awake'] = awake\n json_hash['post_to_twitter'] = post_to_twitter\n json_hash['post_to_facebook'] = post_to_facebook\n \n url = 'https://api.runkeeper.com/sleep'\n \n uri = URI.parse(url)\n \n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n request = Net::HTTP::Post.new(uri.request_uri)\n request[\"Authorization\"] = \"Bearer \" + RUNKEEPER_ACCESS_TOKEN\n request[\"Content-Type\"] = \"application/vnd.com.runkeeper.NewSleep+json\"\n request.body = json_hash.to_json\n \n response = http.request(request)\n \n puts response.body\n \n end\n \n @sleep = json_hash\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sleep }\n end\n \n end",
"def test_submit_rent\n quote_details = SAMPLE_QUOTE_DETAILS.deep_dup\n quote_details['fixed_price_services_requested']['price'] = 1200\n params_hash = {\n udprn: '123456',\n services_required: SAMPLE_SERVICES_REQUIRED,\n payment_terms: SAMPLE_PAYMENT_TERMS,\n quote_details: quote_details.to_json\n }\n first_params_hash = params_hash.deep_dup\n first_params_hash[:quote_details] = SAMPLE_QUOTE_DETAILS.to_json\n post :new_quote_for_property, first_params_hash\n post :new, params_hash\n assert_response 200\n\n quote = Agents::Branches::AssignedAgents::Quote.last\n ### Now lets submit the quote\n post :submit, { udprn: '123456', quote_id: quote.id }\n response = Oj.load(@response.body)\n assert_response 200\n assert_equal response['message'], 'The quote is accepted'\n end",
"def taken_step_params\n params.require(:taken_step).permit(:count, :done_on)\n end",
"def trip_params\n params.require(:trip).permit(:starts_on, :ends_on, :name, :location, {:item_ids => []})\n end",
"def mlsAutoPreQual\r\n # Check max runs\r\n daily_pdq_cnt = Output.where(\"runid LIKE ?\", \"%#{Time.now.to_date}\").length\r\n max_count = 400 - daily_pdq_cnt\r\n max_count = 2\r\n puts \"max_count: #{max_count}\"\r\n\r\n # Build POST call\r\n base_url = \"https://api.mpoapp.com/v1/properties/_search?api_key=#{@@MLS_TOKEN}\"\r\n h = {\"Content-Type\" => 'application/json; charset=UTF-8', \"Cache-Control\" => \"no-cache\"}\r\n\r\n data = {:from => 0, :size => max_count, :sort => {:_created => {:order => \"desc\"}}}\r\n\r\n query_string = \"primary.price.listingPrice: [250000 TO 5000000]\" # price condition\r\n query_string += \" AND mls.onMarketDate:[#{(Time.now.to_date - params[:dayCount].to_i).to_s} TO *]\" # days on market\r\n query_string += \" AND construction.yearBuilt: {* TO #{(Time.now.year.to_i-1).to_s}}\" # build year condition\"\r\n\r\n data[:query] = {:bool => {:minimum_should_match => 1, \r\n :must => [\r\n {:query_string => {:query => query_string}},\r\n {:terms => {\"primary.mpoPropType\" => [\"singleFamily\", \"condominium\", \"loft\", \"apartment\"]}},\r\n {:terms => {\"primary.mpoStatus\" => [\"active\"]}},\r\n {:terms => {\"mls.knownShortSale\" => [\"false\"]}},\r\n \r\n ]\r\n }\r\n }\r\n\r\n # PDQ params\r\n pdq_params = {:path => \"Mls\"}\r\n runID = \"#{pdq_params[:path]}: #{Date.today.to_s}\"\r\n\r\n # Get Results\r\n response = HTTParty.post(base_url, :body => data.to_json, :headers => h)\r\n json_result = JSON.parse(response.to_json) \r\n results = json_result[\"results\"]\r\n\r\n # Get Addresses\r\n results.each do |r|\r\n values = r[\"primary\"][\"address\"]\r\n\r\n # Build street\r\n street = values[\"streetNum\"]\r\n street += \" #{values[\"streetDirection\"]}\" unless values[\"streetDirection\"].nil?\r\n street += \" #{values[\"streetName\"]}\" unless values[\"streetName\"].nil?\r\n street += \" #{values[\"streetSuffix\"]}\" unless values[\"streetSuffix\"].nil?\r\n street += \" Unit #{values[\"unitNum\"]}\" unless values[\"unitNum\"].nil?\r\n\r\n # Build city, state, zip\r\n csz = \"#{values[\"city\"]}, #{values[\"state\"]} #{values[\"zipCode\"]}\"\r\n\r\n # Run through PDQ\r\n # street = MiscFunctions.addressStringClean(street)\r\n # csz = MiscFunctions.addressStringClean(csz)\r\n print \"#{street} + #{csz}\"\r\n geo_data = GeoFunctions.getGoogleGeoByAddress(street, csz)\r\n\r\n a = PdqEngine.computeDecision(geo_data, pdq_params, runID)\r\n end\r\n\r\n @outputs = Output.all\r\n @forexport = false\r\n render 'outputs/index'\r\n end",
"def takeover_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: Class1InstanceApi.takeover ...'\n end\n # resource path\n local_var_path = '/takeover'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'InlineResponse2001'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || ['instanceId', 'token']\n\n new_options = opts.merge(\n :operation => :\"Class1InstanceApi.takeover\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: Class1InstanceApi#takeover\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create\n bikes = Bike.all\n bikes.each do |bike|\n if bike.next_date_inspected == Time.now.in_time_zone\n bike.passed_inspection = false\n end\n end\n create_checkout_values\n respond_to do |format|\n if @checked_out.save\n create_bike_values\n UserMailer.check_out_email(@checked_out.user).deliver\n format.html { redirect_to home_check_out_path, notice: \"The bike was successfully checked out by #{@checked_out.user.email}.\" }\n format.json { render action: 'show', status: :created, location: @checked_out }\n else\n format.html { redirect_to home_check_out_path, :flash => @checked_out.errors }\n format.json { render json: @checked_out.errors, status: :unprocessable_entity }\n end\n end\n end",
"def buy\n @showtime = Showtime.find_by(id: params[:id])\n if @showtime.openSeats > 0\n @showtime.openSeats -= 1\n else\n @showtime.openSeats == 0\n end\n @showtime.save\n render \"show.json.jbuilder\"\n end",
"def create\n @cooking_time = CookingTime.new(cooking_time_params)\n\n respond_to do |format|\n if @cooking_time.save\n format.html { redirect_to cooking_times_path, notice: 'Cooking time was successfully created.' }\n format.json { render action: 'show', status: :created, location: @cooking_time }\n else\n format.html { render action: 'new' }\n format.json { render json: @cooking_time.errors, status: :unprocessable_entity }\n end\n end\n end",
"def save_pricing\n\tif !(Time.current.beginning_of_day..Time.current.end_of_day).include?(@aws_offer_code.offer_code_update_statuses.last.try(:for_date))\n\t\toffer_code_update_statuses = @aws_offer_code.offer_code_update_statuses.create(for_date: Time.current)\n\t\t@json_res[\"terms\"][\"OnDemand\"].each do |k,v|\n\t\t\tprice_json = v.to_a\n\t\t\tget_final_json = price_json[0][1]\n\t\t\tprice_dimension_json = get_final_json[\"priceDimensions\"].to_a[0][1]\n\t\t\tcurrency_json = price_dimension_json[\"pricePerUnit\"].to_a[0]\n\t\t\tcurrency_id = MasterCurrency.get_id(currency_json[0])\n\t\t\tprice_per_unit = currency_json[1]\n\t\t\tif get_final_json.present?\n\t\t\t\taws_price_params = {\n\t\t\t\t\tsku: get_final_json[\"sku\"],\n\t\t\t\t\teffective_date: get_final_json[\"effectiveDate\"],\n\t\t\t\t\tcurrency_id: currency_id,\n\t\t\t\t\tdescription: price_dimension_json[\"description\"],\n\t\t\t\t\tbegin_range: price_dimension_json[\"beginRange\"],\n\t\t\t\t\tend_range: price_dimension_json[\"endRange\"],\n\t\t\t\t\tunit: price_dimension_json[\"unit\"],\n\t\t\t\t\tprice_per_unit: price_per_unit\n\t\t\t\t}\n\t\t\t\tputs \"#{get_final_json[\"sku\"]} saved\"\n\t\t\t\toffer_code_update_statuses.aws_offer_code_prices.create(aws_price_params)\n\t\t\tend\n\t\tend\n\telse\n\t\tputs \"Price can be update once in a day\"\n\tend\nend",
"def measure_params\n params.require(:measure).permit(:temp_out, :temp_in, :humidity_out, :humidity_in, :pump_status)\n end",
"def intake_params\n params.require(:intake).permit(:user_id, :current_own, :current_rent, :current_monthly_pay, :looking_for, :areas, :amenities, :price_range, :bedrooms, :reason, :looking_time, :other_agent, :other_agent_name, :seen_liked, :seen_no_buy, :when_moved, :best_times, :price_min, :price_max, :buying_with, :downpayment, :source, :budget_monthly, :met_lender, :preapproved, :preapproved_amount)\n end",
"def create\n @ticket.save\n\n respond_with(@story, @ticket)\n end",
"def set_strokes_expected_shot\n @strokes_expected_shot = StrokesExpectedShot.find(params[:id])\n end",
"def party_time(options)\n self.class.post(\"/open311/v2/requests.json\", query: options, headers: {'api_key' => ENV['API_KEY']})\n end",
"def tombstone_timehold_params\n params.require(:tombstone_timehold).permit(:tombstoneJSON, :permanent, :rating)\n end",
"def create\n @offpost = Offpost.new(params[:offpost])\n\n respond_to do |format|\n if @offpost.save\n flash[:notice] = 'Offpost was successfully created.'\n format.html { redirect_to :back }\n format.xml { render :xml => @offpost, :status => :created, :location => @offpost }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @offpost.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @intake = Intake.new(params[:intake])\n\n respond_to do |format|\n if @intake.save\n flash[:success] = \"Intake was successfully created.\"\n format.html { redirect_to @intake, notice: 'Intake was successfully updated.' }\n else\n format.html { render action: \"new\" }\n format.json { render json: @intake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def test_submit\n quote_details = SAMPLE_QUOTE_DETAILS.deep_dup\n quote_details['fixed_price_services_requested']['price'] = 1200\n params_hash = {\n udprn: SAMPLE_UDPRN,\n services_required: SAMPLE_SERVICES_REQUIRED,\n payment_terms: SAMPLE_PAYMENT_TERMS,\n quote_details: quote_details.to_json\n }\n first_params_hash = params_hash.deep_dup\n first_params_hash[:quote_details] = SAMPLE_QUOTE_DETAILS.to_json\n post :new_quote_for_property, first_params_hash\n post :new, params_hash\n assert_response 200\n\n quote = Agents::Branches::AssignedAgents::Quote.last\n ### Now lets submit the quote\n post :submit, { udprn: SAMPLE_UDPRN, quote_id: quote.id }\n response = Oj.load(@response.body)\n assert_response 200\n assert_equal response['message'], 'The quote is accepted'\n end",
"def postReadings(info, state)\r\n params = {\r\n :device_id => info.deviceId,\r\n :sensor_data => [\r\n {\r\n :type => \"Temperature\",\r\n :value => state.temperature,\r\n :time => Time.now.to_i\r\n },\r\n {\r\n :type => \"Humidity\",\r\n :value => state.humidity,\r\n :time => Time.now.to_i\r\n }\r\n ]\r\n }\r\n res = apiPostJson(\"readings\", params)\r\n if res.status != 201\r\n $LOG.warn(\"Failed to post readings to backend! Status: #{res.status}, Response: #{res.body}\")\r\n end\r\n end",
"def pick_params\n params.require(:pick).permit(:game_id, :user_id, :pick, :result, :week)\n end",
"def post\n Typhoeus.post(@url,\n body: @results_hash.to_json,\n headers: { 'Content-Type' => 'application/json' })\n end",
"def create\n @pick = Pick.new(pick_params)\n @pick.result = 0\n @pick.user_id = current_user.id\n @pick.game_id = Game.find(params[:game_id]).id\n @pick.week = Game.find(params[:game_id]).week\n\n respond_to do |format|\n if @pick.save\n make_updates(@pick.week)\n format.html { redirect_to current_user, notice: 'Pick was successfully created.' }\n format.json { render :show, status: :created, location: @pick }\n else\n format.html { render :new }\n format.json { render json: @pick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def tip_won\n @tip = Tip.find(params[:tip][:tip_id])\n @prediction = []\n @tip.predictions.each do |prediction|\n @prediction.push(prediction.result.betWon)\n end \n if @prediction.all?\n @tip.won = true\n elsif @prediction.include? false\n @tip.won = false\n else\n @tip.won = nil\n end\n @tip.save\n render json: { data: @tip }.to_json\n end",
"def make_req(point,geofence_id)\n\t# Deleting old fences\n\t@@vars['leaving_a'] = []\n\t@@vars['arriving_a'] = []\n\t@@vars['radii'] = {}\n\n\tdebug_c = []\n\tdebug_r = []\n\tdebug_t = []\n\n\t@@vars['request_counter'] += 1\n\n\tdata = {\n\t\t'device' => {\n\t\t\t'name' => 'Sim_fake_device',\n\t\t\t'foreign_id' => 'sim_id_1',\n\t\t\t'location' => {\n\t\t\t\t'lon' => \"#{point[0]}\", \n\t\t\t\t'lat' => \"#{point[1]}\"\n\t\t\t}\n\t\t},\n\t\t'speed' => \"#{@@vars['walk_speed_ms'][point[2]]}\",\n\t\t'geo_object_ids' => [@@vars['sim_geofence_ids'][geofence_id]]\n\t}\n\n\tresponse = @@vars['access_token'].put(\"/api/v3/devices.json\", JSON[data], HEADERS)\n\n\t@@vars['request_size'] += response.body.length\n\n\tif response && response.code == \"200\"\n\t puts \"Got #{JSON[response.body]['sleep_until'].length} new fences\" if @@vars['sim_static_walk'] < 1\n\t for fence in JSON[response.body]['sleep_until'] do\n\t \tdebug_c << fence['center']\n\t \tdebug_r << fence['radius']\n\t \tdebug_t << fence['status']\n\t \tif fence['type'] == 'circle'\n\n\t \t\tcenter = @@vars['factory'].point(fence['center'][0],fence['center'][1])\n\t \t\tpoly = center.buffer(fence['radius'])\n\n\t \t\t# Add poly to leaving list\n\t \t\tif fence['status'] == 'LEAVING'\n\t \t\t\t#puts \"Got leaving fence\"\n\t \t\t\t@@vars['leaving_a'] << poly\n\t \t\t\t@@vars['radii'][poly] = fence['radius']\n\t \t\tend\n\n\t \t\t# Add poly to arriving list\n \t\t\tif fence['status'] == 'ARRIVING'\n \t\t\t\t#puts \"Got arriving fence\"\n \t\t\t\t@@vars['arriving_a'] << poly\n \t\t\t\t@@vars['radii'][poly] = fence['radius']\n \t\t\tend\n\t \telse\n\t \t\tputs \"ERROR! The returned fences should only be circles!\"\n\t \tend\n\t \t#puts poly\n\t end\n\t #puts jj JSON[response.body]['sleep_until'] # require \"json\" for this to work.\n\telse\n\t #puts jj JSON[response.body]['sleep_until'] # require \"json\" for this to work.\n\tend\n\n\tif @@vars['html_debug']\n\t\t#puts \"{\\\"centers\\\": #{debug_c}, \\\"radii\\\": #{debug_r}}\"\n\t\t@@vars['html_debug_aux_text'] << \"{\\\"centers\\\": #{debug_c}, \\\"radii\\\": #{debug_r}, \\\"type\\\": #{debug_t}}\"\n\tend\nend",
"def quake_params\n params.require(:quake).permit(:device_id, :elapsed, :p, :s)\n end",
"def cutoffs\n end",
"def create\n @cutoff = Cutoff.new(cutoff_params)\n\n respond_to do |format|\n if @cutoff.save\n format.html { redirect_to @cutoff, notice: 'Cutoff was successfully created.' }\n format.json { render :show, status: :created, location: @cutoff }\n else\n format.html { render :new }\n format.json { render json: @cutoff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\t\turi = URI.parse(Counter::Application.config.simplyurl)\n\t\thttp = Net::HTTP.new(uri.host, uri.port)\n\t\t\n\t\trequest = Net::HTTP::Post.new('/offsets.json')\n\t\tputs params\n\t\tputs params.slice(*['custids','acctids','itemids'])\n\t\t\n\t\t# ok, this join stuff is bogus - it encodes properly, but the other side only sees the last element and loses the array type - it's just string\n\t\t# this way, i 'split' it at the other side to recover my array\n\t\t# it should work without the join/split crap, but it doesn't\n\t\trequest.set_form_data({:custids => ( params['custids'] || []).join(','), :acctids => ( params['acctids'] || []).join(','), :itemids => ( params['itemids'] || []).join(','), :amount => params['amount'], :type => params['type']})\n\t\t\n\t\tputs request.body\n\t\t\n\t\tresponse = http.request(request)\n\t\tputs response.body\n\n respond_to do |format|\n format.html { render :text => response.code == :ok ? \"\" : response.body, status: response.code }\n format.json { render :text => response.code == :ok ? \"\" : response.body, status: response.code }\n end\n end",
"def create\n @tenure = Tenure.new(tenure_params)\n\n if @tenure.save\n audit(@tenure, current_user)\n render json: @tenure, status: :created\n else\n render json: @tenure.errors, status: :unprocessable_entity\n end\n end",
"def time_off_instance_params\n params.require(:time_off_instance).permit(:employee_id, :hours_used, :reason)\n end",
"def trip_purposes \n label = request_label(:purposes)\n \n @http_request_bundler.add(\n label, \n @url + \"/trip_purposes\", \n :get,\n head: headers,\n query: { provider_id: provider_id }\n ).response!(label)\n end",
"def takeout_params\n params.require(:takeout).permit(:name, :score, :comment, :best_order)\n end",
"def index\n set_user\n @time_offs = TimeOff.all\n end",
"def create\n @touchpoint = Touchpoint.new(params[:touchpoint])\n\n respond_to do |format|\n if @touchpoint.save\n format.html { redirect_to @touchpoint, notice: 'Touchpoint was successfully created.' }\n format.json { render json: @touchpoint, status: :created, location: @touchpoint }\n else\n format.html { render action: \"new\" }\n format.json { render json: @touchpoint.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_time_request\n TimeRequest.create(\n time: [ Time.new(2000, 1, 1, 14, 0, 0, \"+00:00\").utc, Time.new(2000, 1, 1, 13, 0, 0, \"+00:00\").utc, Time.new(2000, 1, 1, 12, 0, 0, \"+00:00\").utc ].sample,\n reservation: Reservation.all.sample,\n check_in: [true, false].sample,\n status: 'pending'\n )\nend",
"def caloric_intake_params\n params.require(:caloric_intake).permit(:consumed_calories, :calories_date)\n end",
"def time_punch_params\n params.require(:time_punch).permit(:punch, :work_segment_id, :status)\n end",
"def punch_master_params\n params.require(:punch_master).permit(:start_time, :end_time, :reader_in, :reader_out, :full_day_hrs, :half_day_hrs, :status)\n end",
"def measurements_post(opts = {})\n if Configuration.debugging\n Configuration.logger.debug \"Calling API: MeasurementApi#measurements_post ...\"\n end\n \n # resource path\n path = \"/measurements\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'access_token'] = opts[:'access_token'] if opts[:'access_token']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = ['application/json']\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = ['application/json']\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(opts[:'body'])\n \n\n auth_names = ['quantimodo_oauth2']\n result = @api_client.call_api(:POST, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'inline_response_200_13')\n if Configuration.debugging\n Configuration.logger.debug \"API called: MeasurementApi#measurements_post. Result: #{result.inspect}\"\n end\n return result\n end",
"def create\n @pickup_point_time_detail = PickupPointTimeDetail.new(pickup_point_time_detail_params)\n\n respond_to do |format|\n if @pickup_point_time_detail.save\n format.html { redirect_to pickup_point_time_details_path, notice: 'Pickup point time detail was successfully created.' }\n format.json { render :show, status: :created, location: @pickup_point_time_detail }\n else\n format.html { render :new }\n format.json { render json: @pickup_point_time_detail.errors, status: :unprocessable_entity }\n end\n end\n end",
"def hold_params\n params.require(:hold).permit(:book_id, :user_id, :request_date, :release_date)\n end",
"def create\n @trip = Trip.new(trip_params)\n authorize @trip\n @trip.submitter = current_account.accountable\n\n\n @trip.estimated_expenses.each do |exp|\n exp.requests.each do |req|\n req.amount_from_total = req.percentrequested * exp.total\n req.destination = @trip.destination\n req.expense_type = 'estimated'\n end\n end\n\n respond_to do |format|\n if @trip.save\n format.html { redirect_to home_index_path, notice: 'Trip was successfully created.' }\n format.json { render :show, status: :created, location: @trip }\n else\n format.html { render :new }\n format.json { render json: @trip.errors, status: :unprocessable_entity }\n end\n end\n end",
"def trip_params\n params.require(:trip).permit(:date, :time, :pickup_location, :destination, :price, :seats_available, :driver_id, :profile_id)\n end",
"def tally_params\n params.require(:tally).permit(:total_time_m, :total_time_s, :total_time_ms, :total_points, :total_faults, :title, :qualifying_score, :qualifying_scores, :event_ids => [], :entrant_ids => [] )\n end",
"def test_trackpoints\n user = create(:user)\n other_user = create(:user)\n create(:trace, :visibility => \"trackable\", :latitude => 51.51, :longitude => -0.14, :user => user) do |trace|\n create(:tracepoint, :trace => trace, :trackid => 1, :latitude => (51.510 * GeoRecord::SCALE).to_i, :longitude => (-0.140 * GeoRecord::SCALE).to_i)\n create(:tracepoint, :trace => trace, :trackid => 2, :latitude => (51.511 * GeoRecord::SCALE).to_i, :longitude => (-0.141 * GeoRecord::SCALE).to_i)\n end\n create(:trace, :visibility => \"identifiable\", :latitude => 51.512, :longitude => 0.142) do |trace|\n create(:tracepoint, :trace => trace, :latitude => (51.512 * GeoRecord::SCALE).to_i, :longitude => (0.142 * GeoRecord::SCALE).to_i)\n end\n\n get :trackpoints, :params => { :xmin => -1, :xmax => 1, :ymin => 51, :ymax => 52, :baselong => 0, :basey => 0, :masterscale => 1 }\n assert_response :success\n assert_equal \"application/x-shockwave-flash\", response.content_type\n assert_match /^FWS/, response.body\n assert_equal 80, response.body.length\n\n get :trackpoints, :params => { :xmin => -1, :xmax => 1, :ymin => 51, :ymax => 52, :baselong => 0, :basey => 0, :masterscale => 1, :token => other_user.tokens.create.token }\n assert_response :success\n assert_equal \"application/x-shockwave-flash\", response.content_type\n assert_match /^FWS/, response.body\n assert_equal 67, response.body.length\n\n get :trackpoints, :params => { :xmin => -1, :xmax => 1, :ymin => 51, :ymax => 52, :baselong => 0, :basey => 0, :masterscale => 1, :token => user.tokens.create.token }\n assert_response :success\n assert_equal \"application/x-shockwave-flash\", response.content_type\n assert_match /^FWS/, response.body\n assert_equal 74, response.body.length\n end",
"def create\n PlaceHoldsJob.perform_later barcodes:,\n session_token: current_user.session_token,\n patron_key: current_user.patron_key,\n catkey: params['catkey'],\n pickup_library: params['pickup_library'],\n pickup_by_date: params['pickup_by_date']\n\n redirect_to result_path\n end",
"def papertest_params\n params.require(:papertest).permit(:user_id, :paper_id, :end_at, :deleted_at)\n end",
"def playoff_params\n params.require(:playoff).permit(:team1, :team2, :title, :running, :g1, :g2, :g3, :g4, :g5, :g6, :g7)\n end",
"def request_params\n params.require(:request).permit(:start_date, :end_date, :notes, :status, :tool_id)\n end",
"def new_booking_request(pid:, check_in:, check_out:)\n owner_uid = DatabaseConnection.query(\"SELECT * FROM properties WHERE pid = #{pid}\").first['uid']\n message(receiver_uid: owner_uid, content: \"New booking request for your space: #{pid} from #{@name}\\nCheck In Date: #{check_in}\\nCheck In Date: #{check_in}\\nClick the link below to confirm\")\n Booking.create(renter_uid: @uid, pid: pid, check_in: check_in, check_out: check_out)\n end",
"def send_adjustment_to_karma_server(tag_name, adjustment_value)\n resource = RestClient::Resource.new(\"http://#{KARMA_SERVER_HOSTNAME}#{@tags[tag_name]['adjustments_path']}\", \"\", KARMA_API_KEY)\n resource.post(\"adjustment[value]=#{adjustment_value}\")\n rescue RestClient::Exception => e\n # TODO: What is the appropriate behavior when this request fails?\n p e.response\n raise\n end",
"def stocktake_params\n params.require(:stocktake).permit(:stocktake_date, :material_master_id, :inventory_id, :physical_quantity, :unit_price, \n :physical_amount, :book_quantity, :book_amount, :inventory_update_flag)\n end",
"def test_result_params\n params.permit(:name, :status, :marks_earned, :output)\n end",
"def test_new_rent\n quote_details = SAMPLE_QUOTE_DETAILS.deep_dup\n quote_details['fixed_price_services_requested']['price'] = 1200\n params_hash = {\n udprn: '123456',\n services_required: SAMPLE_SERVICES_REQUIRED,\n payment_terms: SAMPLE_PAYMENT_TERMS,\n quote_details: quote_details.to_json\n }\n first_params_hash = params_hash.deep_dup\n first_params_hash[:quote_details] = SAMPLE_QUOTE_DETAILS.to_json\n post :new_quote_for_property, first_params_hash\n prev_quote_count = Agents::Branches::AssignedAgents::Quote.count\n post :new, params_hash\n assert_response 200\n assert_equal Agents::Branches::AssignedAgents::Quote.count, (prev_quote_count + 1)\n end",
"def test_post_then_get\n header 'Content-Type', 'application/json'\n\n data = File.read 'sample-traces/0.json'\n post('/traces', data, 'CONTENT_TYPE': 'application/json')\n id = last_response.body\n\n get \"/traces/#{id}\"\n check_valid_trace last_response.body\n end",
"def create\n @cooking_step = CookingStep.new(cooking_step_params)\n\n respond_to do |format|\n if @cooking_step.save\n format.html { redirect_to @cooking_step, notice: 'Cooking step was successfully created.' }\n format.json { render :show, status: :created, location: @cooking_step }\n else\n format.html { render :new }\n format.json { render json: @cooking_step.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_time_off\n @time_off = TimeOff.find(params[:id])\n end",
"def set_time_off\n @time_off = TimeOff.find(params[:id])\n end",
"def create\n @caloric_intake = CaloricIntake.new(caloric_intake_params)\n\n respond_to do |format|\n if @caloric_intake.save\n format.html { redirect_to @caloric_intake, notice: 'Caloric intake was successfully created.' }\n format.json { render :show, status: :created, location: @caloric_intake }\n else\n format.html { render :new }\n format.json { render json: @caloric_intake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n #not sure why this was here...? @trips = Trip.all\n\n @expense_report = ExpenseReport.new(expense_report_params)\n authorize @expense_report\n @expense_report.submitter = current_account.accountable\n\n @expense_report.actual_expenses.each do |exp|\n exp.requests.each do |req|\n req.amount_from_total = req.percentrequested * exp.total\n req.destination = @expense_report.trip.destination\n req.expense_type = 'actual'\n end\n end\n\n\n respond_to do |format|\n if @expense_report.save\n format.html { redirect_to home_index_path, notice: 'Expense report was successfully created.' }\n format.json { render :show, status: :created, location: @expense_report }\n else\n format.html { render :new }\n format.json { render json: @expense_report.errors, status: :unprocessable_entity }\n end\n end\n end",
"def ride_params\n params.require(:ride).permit(\n :time,\n :ride_type,\n :from_location,\n :destination,\n :info,\n :seats,\n :status\n )\n end",
"def create\n @vote = Vote.new\n set_item = SetItem.find_by(id: params[:set_item_id])\n @vote.set_item_id = params[:set_item_id]\n @vote.audience_member_id = params[:audience_member_id]\n @vote.event_id = params[:event_id]\n\n\n respond_to do |format|\n if @vote.save\n format.html { redirect_to live_event_url(set_item.event.code) }\n format.json { render action: 'show', status: :created, location: @vote }\n else\n format.html { redirect_to live_event_url(set_item.event.code) }\n format.json { render json: @vote.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sweep = Sweep.new(sweep_params)\n\n respond_to do |format|\n if @sweep.save\n format.html { redirect_to @sweep, notice: 'Sweep was successfully created.' }\n format.json { render :show, status: :created, location: @sweep }\n else\n format.html { render :new }\n format.json { render json: @sweep.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @qx_runway = Qx::Runway.new(qx_runway_params)\n\n respond_to do |format|\n if @qx_runway.save\n format.html { redirect_to @qx_runway, notice: 'Runway was successfully created.' }\n format.json { render :show, status: :created, location: @qx_runway }\n else\n format.html { render :new }\n format.json { render json: @qx_runway.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \n @intake = Intake.new(params[:intake])\n\n respond_to do |format|\n if @intake.save\n format.html { redirect_to(@intake, :notice => t('intake.title2')+\" \"+t('created')) }\n format.xml { render :xml => @intake, :status => :created, :location => @intake }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @intake.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @point_consumption = PointConsumption.new(params[:point_consumption])\n\n respond_to do |format|\n if @point_consumption.save\n format.html { redirect_to @point_consumption, :notice => 'Point consumption was successfully created.' }\n format.json { render :json => @point_consumption, :status => :created, :location => @point_consumption }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @point_consumption.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | [
"0.62888044",
"0.62379766",
"0.5595522",
"0.5462763",
"0.5343938",
"0.52733797",
"0.51407516",
"0.5062816",
"0.5062299",
"0.5022074",
"0.49795154",
"0.49691406",
"0.49611592",
"0.49168292",
"0.49149147",
"0.4893492",
"0.48847798",
"0.4867671",
"0.4867671",
"0.48537987",
"0.48537987",
"0.47997087",
"0.47946793",
"0.4788782",
"0.47794998",
"0.47637555",
"0.47610676",
"0.47438765",
"0.47392717",
"0.47385815",
"0.47338873",
"0.4729936",
"0.47051337",
"0.47008702",
"0.46987402",
"0.46928257",
"0.4688067",
"0.46861395",
"0.46608087",
"0.4656337",
"0.46491382",
"0.46406585",
"0.46389568",
"0.46273324",
"0.46233588",
"0.46205214",
"0.4617186",
"0.46167302",
"0.4615148",
"0.4612816",
"0.460616",
"0.46019334",
"0.46016484",
"0.459778",
"0.45971993",
"0.4592356",
"0.45896468",
"0.45895657",
"0.4585086",
"0.45831525",
"0.45829242",
"0.45775962",
"0.45633867",
"0.45569766",
"0.45555565",
"0.45511624",
"0.45466962",
"0.45454547",
"0.4541111",
"0.45399335",
"0.45365933",
"0.45300797",
"0.45219037",
"0.4516768",
"0.4515914",
"0.45155355",
"0.45092684",
"0.4508724",
"0.4507756",
"0.45077294",
"0.45023227",
"0.44996318",
"0.44968906",
"0.44954082",
"0.44945177",
"0.44876122",
"0.448552",
"0.4484352",
"0.44835335",
"0.44832912",
"0.44798985",
"0.44798985",
"0.4473273",
"0.44732463",
"0.44689882",
"0.44688997",
"0.44618526",
"0.4461083",
"0.44559985",
"0.444878"
] | 0.63028187 | 0 |
PATCH/PUT /qx/take_offs/1 PATCH/PUT /qx/take_offs/1.json | def update
respond_to do |format|
if @qx_take_off.update(qx_take_off_params)
format.html { redirect_to @qx_take_off, notice: 'Take off was successfully updated.' }
format.json { render :show, status: :ok, location: @qx_take_off }
else
format.html { render :edit }
format.json { render json: @qx_take_off.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n @kickoff = Kickoff.find(params[:id])\n\n respond_to do |format|\n if @kickoff.update_attributes(params[:kickoff])\n format.html { redirect_to @kickoff, notice: 'Kickoff was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kickoff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @time_off_request = TimeOffRequest.find(params[:id])\n respond_to do |format|\n if @time_off_request.update_attributes(params[:time_off_request])\n format.html { redirect_to admin_time_off_requests_url, notice: 'Time off request was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @time_off_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @pick.update_attributes(picks_params)\n format.html { redirect_to games_path, notice: 'Pick was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @pick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @take = Take.find(params[:id])\n\n respond_to do |format|\n if @take.update_attributes(params[:take])\n format.html { redirect_to @take, notice: 'Take was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @take.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @intake = Intake.find(params[:id])\n\n respond_to do |format|\n if @intake.update_attributes(params[:intake])\n format.html { redirect_to @intake, notice: 'Intake was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @intake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @taking.update(taking_params)\n format.html { redirect_to @taking, notice: 'Taking was successfully updated.' }\n format.json { render :show, status: :ok, location: @taking }\n else\n format.html { render :edit }\n format.json { render json: @taking.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(params)\n res = @client.put(path, nil, params, \"Content-Type\" => \"application/json\")\n @attributes = res.json if res.status == 201\n res\n end",
"def update!(**args)\n @requests = args[:requests] if args.key?(:requests)\n end",
"def update!(**args)\n @requests = args[:requests] if args.key?(:requests)\n end",
"def update!(**args)\n @requests = args[:requests] if args.key?(:requests)\n end",
"def update!(**args)\n @requests = args[:requests] if args.key?(:requests)\n end",
"def update\n update_resource @ride, ride_params\n end",
"def update\n \n #在庫マスターのIDをセットする\n update_flag = 1\n\tset_inventories_id\n \n respond_to do |format|\n if @stocktake.update(stocktake_params)\n format.html { redirect_to @stocktake, notice: 'Stocktake was successfully updated.' }\n format.json { render :show, status: :ok, location: @stocktake }\n else\n format.html { render :edit }\n format.json { render json: @stocktake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @ticket.update_attributes(params[:ticket])\n\n respond_with(@story, @ticket)\n end",
"def UpdateTicket params = {}\n \n APICall(path: 'tickets.json',method: 'PUT',payload: params.to_json)\n \n end",
"def patch!\n request! :patch\n end",
"def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def put!\n request! :put\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @status_detail = args[:status_detail] if args.key?(:status_detail)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @status_detail = args[:status_detail] if args.key?(:status_detail)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @status_detail = args[:status_detail] if args.key?(:status_detail)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @status_detail = args[:status_detail] if args.key?(:status_detail)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @status_detail = args[:status_detail] if args.key?(:status_detail)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @status_detail = args[:status_detail] if args.key?(:status_detail)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update\n @point_consumption = PointConsumption.find(params[:id])\n\n respond_to do |format|\n if @point_consumption.update_attributes(params[:point_consumption])\n format.html { redirect_to @point_consumption, :notice => 'Point consumption was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @point_consumption.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @pick.update(pick_params)\n format.html { redirect_to @pick, notice: 'Pick was successfully updated.' }\n format.json { render :show, status: :ok, location: @pick }\n else\n format.html { render :edit }\n format.json { render json: @pick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @getoff_info = GetoffInfo.find(params[:id])\n\n respond_to do |format|\n if @getoff_info.update_attributes(params[:getoff_info])\n format.html { redirect_to @getoff_info, :notice => 'Getoff info was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @getoff_info.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def UpdateView params = {}\n \n APICall(path: 'views.json',method: 'PUT',payload: params.to_json)\n \n end",
"def update\n @touchpoint = Touchpoint.find(params[:id])\n\n respond_to do |format|\n if @touchpoint.update_attributes(params[:touchpoint])\n format.html { redirect_to @touchpoint, notice: 'Touchpoint was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @touchpoint.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @cooking_time.update(cooking_time_params)\n format.html { redirect_to cooking_times_path, notice: 'Cooking time was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @cooking_time.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @preceed = Preceed.find(params[:id])\n\n respond_to do |format|\n if @preceed.update_attributes(params[:preceed])\n format.html { redirect_to @preceed, notice: 'Preceed was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @preceed.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @trick = Trick.find(params[:id])\n\n respond_to do |format|\n if @trick.update_attributes(params[:trick])\n format.html { redirect_to @trick, notice: 'Trick was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @trick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @tips_trick.update_attributes(params[:tips_trick])\n format.html { redirect_to @tips_trick, notice: 'Tips trick was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @tips_trick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @step = TaskrequestsStep.find(params[:taskrequests_step_id])\n @absence_request = @step.absence_requests.find(params[:id])\n\n respond_to do |format|\n if @absence_request.update_attributes(params[:absence_request])\n format.html { redirect_to(@absence_request, :notice => 'Absence request was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @absence_request.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @tick.update(tick_params)\n format.html { redirect_to @tick, notice: 'Tick was successfully updated.' }\n format.json { render :show, status: :ok, location: @tick }\n else\n format.html { render :edit }\n format.json { render json: @tick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @hold_request.update(hold_request_params)\n format.html { redirect_to @hold_request, notice: 'Hold request was successfully updated.' }\n format.json { render :show, status: :ok, location: @hold_request }\n else\n format.html { render :edit }\n format.json { render json: @hold_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @caloric_intake.update(caloric_intake_params)\n format.html { redirect_to @caloric_intake, notice: 'Caloric intake was successfully updated.' }\n format.json { render :show, status: :ok, location: @caloric_intake }\n else\n format.html { render :edit }\n format.json { render json: @caloric_intake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @intake = Intake.find(params[:id])\n\n respond_to do |format|\n if @intake.update_attributes(params[:intake])\n format.html { redirect_to(@intake, :notice => t('intake.title2')+\" \"+t('updated')) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @intake.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n\t\trespond_to do |format|\n\t\t\tif @ride.update(offer_1_ride_params)\n\t\t\t\t@ride.set_routes\n\t\t\t\t@ride.handle_return_date_and_recurring_weeks\n\t\t\t\tformat.html { redirect_to \"/#{@ride.id}/offer-seats/2\" }\n\t\t\t\tformat.json { render :show, status: :created, location: @ride }\n\t\t\telse\n\t\t\t\tformat.html { render :edit }\n\t\t\t\tformat.json { render json: @ride.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def patch\n headers = {\"If-Match\" => @version}\n response = @context.request :patch, \"#{@path}/#{@id}\", @data.to_json, headers\n @version += 1\n response\n # 'X-HTTP-Method-Override' => 'PATCH'\n end",
"def update\n authorize @trip\n\n\n @trip.estimated_expenses.each do |exp|\n exp.requests.each do |req|\n req.amount_from_total = req.percentrequested * exp.total\n req.destination = @trip.destination\n req.expense_type = 'estimated'\n end\n end\n\n respond_to do |format|\n if @trip.update(trip_params)\n format.html { redirect_to @trip, notice: 'Trip was successfully updated.' }\n format.json { render :show, status: :ok, location: @trip }\n else\n format.html { render :edit }\n format.json { render json: @trip.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @sculpture.update(sculpture_params)\n format.html { redirect_to @sculpture, notice: 'Sculpture was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @sculpture.errors, status: :unprocessable_entity }\n end\n end\n end",
"def save\n @client.patch(@endpoint, :content=>@changed)\n return nil\n end",
"def update\n @timing = Timing.find(params[:id])\n if @timing.update_attributes(params[:timing].slice(:start, :stop, :days, :active))\n render json: @timing\n else\n render json: { error: 'error: could not update timing' }\n end\n end",
"def update\n respond_to do |format|\n if @ride_request.update(ride_request_params)\n format.html { redirect_to @ride_request, notice: 'Ride request was successfully updated.' }\n format.json { render :show, status: :ok, location: @ride_request }\n else\n format.html { render :edit }\n format.json { render json: @ride_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @taker = Taker.find(params[:id])\n\n respond_to do |format|\n if @taker.update_attributes(params[:taker])\n format.html { redirect_to @taker, notice: 'Taker was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @taker.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @kit = Kit.find(params[:id])\n\n respond_to do |format|\n if @kit.update_attributes(params[:kit])\n format.html { redirect_to @kit, notice: 'Kit was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kit.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @order_pick = OrderPick.find(params[:id])\n\n respond_to do |format|\n if @order_pick.update_attributes(params[:order_pick])\n format.html { redirect_to @order_pick, notice: 'Order pick was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @order_pick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @pickup = Pickup.find(params[:id])\n\n respond_to do |format|\n if @pickup.update_attributes(params[:pickup])\n format.html { redirect_to @pickup, notice: 'Pickup was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @pickup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @intake_question = IntakeQuestion.find(params[:id])\n\n respond_to do |format|\n if @intake_question.update_attributes(params[:intake_question])\n format.html { redirect_to @intake_question, notice: 'Intake question was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @intake_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @ticket.update(ticket_params)\n @ticket.update_responsible(:status, 'Waiting for Staff Response')\n format.html { redirect_to @ticket, notice: 'Ticket was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @ticket.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update options={}\n client.put(\"/#{id}\", options)\n end",
"def update\n @coff = Coff.find(params[:id])\n\n respond_to do |format|\n if @coff.update_attributes(params[:coff])\n format.html { redirect_to @coff, notice: 'Coff was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @coff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @api_v1_exercise.update(api_v1_exercise_params)\n format.html { redirect_to @api_v1_exercise, notice: 'Exercise was successfully updated.' }\n format.json { render :show, status: :ok, location: @api_v1_exercise }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_exercise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update # PATCH\n raise NotImplementedError\n end",
"def update\n respond_to do |format|\n if @stoff.update(stoff_params)\n format.html { redirect_to @stoff }\n format.json { render :show, status: :ok, location: @stoff }\n else\n format.html { render :edit }\n format.json { render json: @stoff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @feat = @person.feats.find(params[:id])\n level_old = @person.level\n\n if params[:feat][:completed] == '1'\n @feat.complete\n else\n @feat.uncomplete\n end\n sign = params[:feat][:completed] == '1' ? '+': '-'\n \n has_leveled = @person.level > level_old\n\n respond_to do |format|\n format.json { render :json => {\n :xpGained => \"#{sign}#{@feat.xp}\",\n :xpTotal => @person.xp,\n :next_level_ratio => @person.next_level_ratio,\n :extra_life => @person.level_to_string,\n :has_leveled => has_leveled,\n :completed => @feat.completed,\n :streak => @feat.calculate_streak}}\n \n end\n\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update\n respond_to do |format|\n if @youbride.update(youbride_params)\n format.html { redirect_to @youbride, notice: 'Youbride was successfully updated.' }\n format.json { render :show, status: :ok, location: @youbride }\n else\n format.html { render :edit }\n format.json { render json: @youbride.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @thing = Thing.find(params[:id])\n params[:thing][:place_id] = place_id_from_form\n params[:thing][:owner_ids] = [ ] if params[:thing][:owner_ids].nil?\n params[:thing][:keeper_ids] = [ ] if params[:thing][:keeper_ids].nil?\n\n respond_to do |format|\n if @thing.update_attributes(params[:thing])\n format.html { redirect_to(@thing, :notice => 'Thing was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @thing.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n logger.info { \"PARAMS: #{params.inspect}\" }\n project_id, id = params[:id].split('-')\n ticket = Lighthouse::Ticket.find(id, :params => {:project_id => project_id})\n \n # insanely hacky. can't nest json, so don't want to do a willy-nilly merge.\n # move mergeable params to the [:ticket] hash to follow usual rails conventions\n # before merging\n params[:ticket] = {}\n %w(assigned_user_id state milestone_id).each do |field|\n params[:ticket].merge!( field => params.delete(field) ) if params[field]\n end\n logger.info { \"TICKET ATTRS TO UPDATE: #{params[:ticket].inspect}\"}\n \n ticket.attributes.merge!( params[:ticket] )\n ticket.save\n\n respond_to do |format|\n # if @ticket.update_attributes(params[:ticket])\n # flash[:notice] = 'Ticket was successfully updated.'\n # format.html { redirect_to(@ticket) }\n # format.xml { head :ok }\n # else\n # format.html { render :action => \"edit\" }\n # format.xml { render :xml => @ticket.errors, :status => :unprocessable_entity }\n # end\n end\n end",
"def update\n\n @intake.title = @intake.course.title + \": \" + @intake.start_date.strftime(\"%b, %Y\")\n respond_to do |format|\n if @intake.update_attributes(params[:intake])\n format.html { redirect_to @intake, notice: 'Intake was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @intake.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @request_for_change.set_manager(force: true)\n @request_for_change.set_security_officer(force: true)\n\n respond_to do |format|\n if @request_for_change.update(request_for_change_params)\n format.html { redirect_to edit_request_for_change_path(@request_for_change), notice: 'Request for change was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @request_for_change.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @one_time_stop = OneTimeStop.find(params[:id])\n\n respond_to do |format|\n if @one_time_stop.update_attributes(params[:one_time_stop])\n format.html { redirect_to @one_time_stop, notice: 'One time stop was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @one_time_stop.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @tick_track = TickTrack.find(params[:id])\n\n respond_to do |format|\n if @tick_track.update_attributes(params[:tick_track])\n format.html { redirect_to tick_tracks_url }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @tick_track.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update(url, data)\n RestClient.put url, data, :content_type => :json\nend",
"def update\n @ticket.update(ticket_params)\n\n end",
"def update\n respond_to do |format|\n if @check_point.update(check_point_params)\n format.html { redirect_to @check_point, notice: 'Check point was successfully updated.' }\n format.json { render :show, status: :ok, location: @check_point }\n else\n format.html { render :edit }\n format.json { render json: @check_point.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @cooking_step.update(cooking_step_params)\n format.html { redirect_to @cooking_step, notice: 'Cooking step was successfully updated.' }\n format.json { render :show, status: :ok, location: @cooking_step }\n else\n format.html { render :edit }\n format.json { render json: @cooking_step.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n set_user\n set_time_off\n respond_to do |format|\n if @time_off.update(time_off_params)\n format.html { redirect_to user_time_off_path(@user, @time_off), notice: 'Time off was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @time_off.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @kit.update(kit_params)\n format.html { redirect_to @kit, notice: 'Kit was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @kit.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n thing_name = params[:id]\n is_on = params[:on].to_i\n client = Broker::Wrapper.new(current_user, thing_name)\n result = client.set_state(is_on)\n\n result_hash = result.as_json\n render json: result_hash, status: 200\n end",
"def update\r\n respond_to do |format|\r\n if @way_point.update(way_point_params)\r\n format.html { redirect_to @way_point, notice: 'Way point was successfully updated.' }\r\n format.json { head :no_content }\r\n else\r\n format.html { render action: 'edit' }\r\n format.json { render json: @way_point.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update\n @client_need = ClientNeed.find(params[:id])\n\n respond_to do |format|\n if @client_need.update_attributes(params[:client_need])\n format.html { redirect_to @client_need, notice: 'Client need was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @client_need.errors, status: :unprocessable_entity }\n end\n end\n end",
"def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end",
"def update\n @shot = Shot.find(params[:id])\n @shot.update_attributes(shot_params)\n respond_with @shot\n end",
"def update\n respond_to do |format|\n if @playoff.update(playoff_params)\n format.html { redirect_to @playoff, notice: 'Playoff was successfully updated.' }\n format.json { render :show, status: :ok, location: @playoff }\n else\n format.html { render :edit }\n format.json { render json: @playoff.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @safe_cooking_temp.update(safe_cooking_temp_params)\n format.html { redirect_to @safe_cooking_temp, notice: 'Safe cooking temp was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @safe_cooking_temp.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @way_point.update(way_point_params)\n format.html { redirect_to @way_point, notice: 'Way point was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @way_point.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @get_in_touch.update(get_in_touch_params)\n format.html { redirect_to @get_in_touch, notice: 'Get in touch was successfully updated.' }\n format.json { render :show, status: :ok, location: @get_in_touch }\n else\n format.html { render :edit }\n format.json { render json: @get_in_touch.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_qx_take_off\n @qx_take_off = Qx::TakeOff.find(params[:id])\n end",
"def update!(**args)\n @api_version = args[:api_version] if args.key?(:api_version)\n @create_time = args[:create_time] if args.key?(:create_time)\n @end_time = args[:end_time] if args.key?(:end_time)\n @requested_cancellation = args[:requested_cancellation] if args.key?(:requested_cancellation)\n @status_message = args[:status_message] if args.key?(:status_message)\n @target = args[:target] if args.key?(:target)\n @validation_result = args[:validation_result] if args.key?(:validation_result)\n @verb = args[:verb] if args.key?(:verb)\n end",
"def update\n @calorie_intake = CalorieIntake.find(params[:id])\n\n respond_to do |format|\n if @calorie_intake.update_attributes(params[:calorie_intake])\n format.html { redirect_to(@calorie_intake, :notice => 'CalorieIntake was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @calorie_intake.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @super_bowl_pick = SuperBowlPick.find(params[:id])\n\n respond_to do |format|\n if @super_bowl_pick.update_attributes(params[:super_bowl_pick])\n format.html { redirect_to @super_bowl_pick, notice: 'Super bowl pick was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @super_bowl_pick.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n @past_request.received_help = true\n if @past_request.update(past_request_params)\n format.html { redirect_to requests_path, notice: \"You spent #{time_ago_in_words(Time.now - (Time.now - @past_request.created_at), include_seconds: true)}\" }\n format.json { render :show, status: :ok, location: @past_request }\n else\n format.html { render :edit }\n format.json { render json: @past_request.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.61186934",
"0.60202146",
"0.5990585",
"0.5884832",
"0.5873944",
"0.5863343",
"0.5852479",
"0.58492684",
"0.58492684",
"0.58492684",
"0.58492684",
"0.5827921",
"0.58020145",
"0.5784272",
"0.57669574",
"0.5726589",
"0.5726461",
"0.5709916",
"0.5709825",
"0.5709825",
"0.5709825",
"0.5709825",
"0.5709825",
"0.5709825",
"0.57095516",
"0.5704436",
"0.5700554",
"0.56864965",
"0.56851214",
"0.5669574",
"0.5657235",
"0.56560963",
"0.56526214",
"0.5647648",
"0.56446946",
"0.5639509",
"0.5636486",
"0.5636322",
"0.561082",
"0.5601456",
"0.5590095",
"0.5579568",
"0.5576273",
"0.5573024",
"0.5572226",
"0.557151",
"0.55647975",
"0.55593187",
"0.5554447",
"0.5554387",
"0.5551793",
"0.55463386",
"0.5540584",
"0.5537139",
"0.55320686",
"0.5530975",
"0.5524955",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.552202",
"0.55212426",
"0.55185616",
"0.5513678",
"0.5509099",
"0.5500185",
"0.54970396",
"0.5494398",
"0.54929024",
"0.5491802",
"0.5491019",
"0.54896796",
"0.54844207",
"0.5475736",
"0.54739636",
"0.5472279",
"0.54713166",
"0.54712",
"0.54711646",
"0.5470679",
"0.5468099",
"0.5465003",
"0.5460459",
"0.5459428",
"0.5459037",
"0.545848",
"0.5457656",
"0.5456167"
] | 0.70002294 | 0 |
DELETE /qx/take_offs/1 DELETE /qx/take_offs/1.json | def destroy
@qx_take_off.destroy
respond_to do |format|
format.html { redirect_to qx_take_offs_url, notice: 'Take off was successfully destroyed.' }
format.json { head :no_content }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_del\n header 'Content-Type', 'application/json'\n\n data = File.read 'sample-traces/0.json'\n post('/traces', data, 'CONTENT_TYPE': 'application/json')\n\n id = last_response.body\n\n delete \"/traces/#{id}\"\n assert last_response.ok?\n\n get \"/traces/#{id}\"\n\n contents = JSON.parse last_response.body\n assert_kind_of(Hash, contents, 'Response contents is not a hash')\n assert contents.key? 'description'\n assert(!last_response.ok?)\n end",
"def cmd_delete argv\n setup argv\n uuid = @hash['uuid']\n response = @api.delete(uuid)\n msg response\n return response\n end",
"def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend",
"def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def delete\n client.delete(\"/#{id}\")\n end",
"def destroy\n @intake.destroy\n respond_to do |format|\n format.html { redirect_to intakes_url }\n format.json { head :no_content }\n end\n end",
"def delete\n client.delete(url)\n @deleted = true\nend",
"def cmd_delete argv\n setup argv\n e = @hash['element']\n response = @api.delete(e)\n msg response\n return response\n end",
"def delete_json(path)\n url = [base_url, path].join\n resp = HTTParty.delete(url, headers: standard_headers)\n parse_json(url, resp)\n end",
"def delete(path)\n RestClient.delete request_base+path\n end",
"def delete!( opts = {} )\n http_action :delete, nil, opts\n end",
"def destroy\n @hold_request.destroy\n respond_to do |format|\n format.html { redirect_to hold_requests_url, notice: 'Hold request was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @take = Take.find(params[:id])\n @take.destroy\n\n respond_to do |format|\n format.html { redirect_to takes_url }\n format.json { head :ok }\n end\n end",
"def delete\n api_client.delete(url)\n end",
"def destroy\n @time_off_request = TimeOffRequest.find(params[:id])\n @time_off_request.destroy\n \n respond_to do |format|\n format.html { redirect_to admin_time_off_requests_url}\n format.json { head :ok }\n end\n end",
"def destroy\n @kickoff = Kickoff.find(params[:id])\n @kickoff.destroy\n\n respond_to do |format|\n format.html { redirect_to kickoffs_url }\n format.json { head :no_content }\n end\n end",
"def delete\n res = HTTParty.get URL, headers: HEADERS\n message = JSON.parse res.body, symbolize_names: true\n if res.code == 200\n numSubs = message[:data].count\n if numSubs > 0\n message[:data].each do |sub|\n id = sub[:id]\n delRes = HTTParty.delete \"#{URL}/#{id}\", headers: HEADERS\n #TODO handle status codes\n end\n end\n end\n end",
"def destroy\n @intake = Intake.find(params[:id])\n @intake.destroy\n\n respond_to do |format|\n format.html { redirect_to(intakes_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @myb_evl_intake.destroy\n respond_to do |format|\n format.html { redirect_to myb_evl_intakes_url, notice: 'Myb evl intake was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete\n response = WebPay.client.delete(path)\n response['deleted']\n end",
"def destroy\n @intake = Intake.find(params[:id])\n @intake.destroy\n\n respond_to do |format|\n flash[:success] = \"Intake successfully detroyed!\"\n format.html { redirect_to intakes_url }\n format.json { head :no_content }\n end\n end",
"def delete path\n make_request(path, \"delete\", {})\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def delete()\n @api.do_request(\"DELETE\", get_base_api_path())\n end",
"def deleteExecution(execution_id)\n uri = URI(RUNDECKSERVER + ':' + RUNDECKPORT + '/api/12/execution/' + execution_id)\n http = Net::HTTP.new(uri.host, uri.port)\n headers = {'Content-Type'=> 'application/jsonr','X-RunDeck-Auth-Token'=> API_KEY }\n r = http.delete(uri.path, headers) \n return r\nend",
"def destroy\n RestClient.delete \"#{REST_API_URI}/contents/#{id}.xml\" \n self\n end",
"def destroy\n @lineup = Lineup.find(params[:id])\n @lineup.destroy\n\n respond_to do |format|\n format.html { redirect_to lineups_url }\n format.json { head :no_content }\n end\n end",
"def delete\n request(:delete)\n end",
"def destroy\n @line = Line.find(params[:id])\n @line.destroy\n\n respond_to do |format|\n format.html { redirect_to budget_path(@line.budget) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @treq = Treq.find(params[:id])\n @treq.destroy\n\n respond_to do |format|\n format.html { redirect_to treqs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @line_item1 = LineItem1.find(params[:id])\n @line_item1.destroy\n\n respond_to do |format|\n format.html { redirect_to line_item1s_url }\n format.json { head :no_content }\n end\n end",
"def delete\n url = prefix + \"delete\" + id_param\n return response(url)\n end",
"def destroy\n @open_shoot = OpenShoot.find(params[:id])\n @open_shoot.destroy\n\n respond_to do |format|\n format.html { redirect_to open_shoots_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @reloud_check = ReloudCheck.find(params[:id])\n @reloud_check.destroy\n\n respond_to do |format|\n format.html { redirect_to reloud_checks_url }\n format.json { head :no_content }\n end\n end",
"def delete(*args)\n request(:delete, *args)\n end",
"def cancelReservation(lease_uuid)\n broker_url = APP_CONFIG['broker_ip'] + ':' + APP_CONFIG['broker_port'].to_s \n cert_path = APP_CONFIG['cert_path']\n \n header = {\"Content-Type\" => \"application/json\"}\n options = {uuid: lease_uuid}\n\n #puts options.to_json \n uri = URI.parse(broker_url+\"/resources/leases\")\n pem = File.read(cert_path)\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n http.cert = OpenSSL::X509::Certificate.new(pem)\n http.key = OpenSSL::PKey::RSA.new(pem)\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\n request = Net::HTTP::Delete.new(uri.request_uri, header)\n request.body = options.to_json\n\n response = http.request(request)\n puts response\n if response.header.code != '200'\n puts \"Something went wrong\"\n puts response\n end\n end",
"def destroy\n @hold_request.destroy\n respond_to do |format|\n format.html { redirect_to student_requests_path, notice: 'Hold request was deleted.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @qx_runway.destroy\n respond_to do |format|\n format.html { redirect_to qx_runways_url, notice: 'Runway was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete(*rest) end",
"def destroy\n @caloric_intake.destroy\n respond_to do |format|\n format.html { redirect_to caloric_intakes_url, notice: 'Caloric intake was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete\n url = prefix + \"delete\"\n return response(url)\n end",
"def delete\n url = prefix + \"delete\"\n return response(url)\n end",
"def destroy\n id = params[:id]\n @physical_rack = PhysicalRack.any_of({_id: id}, {name: id.gsub('-', '.')}).first\n @physical_rack.destroy\n\n respond_to do |format|\n format.html { redirect_to physical_racks_url }\n format.json { head :ok }\n end\n end",
"def delete endpoint\n do_request :delete, endpoint\n end",
"def destroy\n @client_need = ClientNeed.find(params[:id])\n @client_need.destroy\n\n respond_to do |format|\n format.html { redirect_to client_needs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @beattape = Beattape.find(params[:id])\n @beattape.destroy\n\n respond_to do |format|\n format.html { redirect_to beattapes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @point_consumption = PointConsumption.find(params[:id])\n @point_consumption.destroy\n\n respond_to do |format|\n format.html { redirect_to point_consumptions_url }\n format.json { head :no_content }\n end\n end",
"def delete(path, params)\n parse_response @client[path].delete(:params => params)\n end",
"def delete\n sellID = params[:sell_id]\n\n uri = URI(\"http://107.170.7.58:4567/api/delete/sell\")\n parameters = {\"ext\" => \"json\", \"id\" => sellID}\n response = Net::HTTP.post_form(uri, parameters)\n list = JSON.parse(response.body)\n\n @response = list[0][\"kind\"]\n end",
"def destroy\n @my_time_trial = MyTimeTrial.find(params[:id])\n @my_time_trial.destroy\n\n respond_to do |format|\n format.html { redirect_to my_time_trials_url }\n format.json { head :no_content }\n end\n end",
"def deleteEntityOpening_times( entity_id)\n params = Hash.new\n params['entity_id'] = entity_id\n return doCurl(\"delete\",\"/entity/opening_times\",params)\n end",
"def delete(*args)\n prepare_request(:delete, args)\n @@client.add(:delete, @path, *args)\n end",
"def destroy\n @cooking_time.destroy\n respond_to do |format|\n format.html { redirect_to cooking_times_url }\n format.json { head :no_content }\n end\n end",
"def delete_data\n response = WebPay.client.delete([path, 'data'].join('/'))\n response['deleted']\n end",
"def destroy\n @measure = Measure.find(params[:id])\n @measure.destroy\n\n respond_to do |format|\n format.html { redirect_to measures_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @calorie_intake = CalorieIntake.find(params[:id])\n @calorie_intake.destroy\n respond_to do |format|\n format.html { redirect_to(calorie_intakes_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @wait = Wait.find(params[:id])\n @wait.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def delete!\n request! :delete\n end",
"def delete\n ruta = \"/actions/#{action_id}\"\n client.delete(ruta)\n end",
"def destroy\n puts @iot_datum.count\n if @iot_datum.count > 0\n @deleted_rec = IotDatum.new\n @deleted_rec.workbench_number = @iot_datum.workbench_number\n @deleted_rec.part_number = @iot_datum.part_number\n @deleted_rec.target = @iot_datum.target\n @deleted_rec.lot_size = @iot_datum.lot_size\n @deleted_rec.employee_name = @iot_datum.employee_name\n @deleted_rec.shift = @iot_datum.shift\n @deleted_rec.device_id = @iot_datum.device_id\n @deleted_rec.count = @iot_datum.count\n @deleted_rec.status = 'Deleted'\n @deleted_rec.save!\n @iot_datum.destroy\n else\n @iot_datum.destroy\n end\n respond_to do |format|\n format.html { redirect_to iot_data_url, notice: 'Planner was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete\n RestClient.delete(url, @header) do |rso, req, res|\n setup(rso, req, res)\n end\n end",
"def destroy\n @takeout.destroy\n respond_to do |format|\n format.html { redirect_to takeouts_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @coff = Coff.find(params[:id])\n @coff.destroy\n\n respond_to do |format|\n format.html { redirect_to coffs_url }\n format.json { head :no_content }\n end\n end",
"def deleteRequest\n\n end",
"def destroy\n @taxirequest = Taxirequest.find(params[:id])\n @taxirequest.destroy\n\n respond_to do |format|\n format.html { redirect_to taxirequests_url }\n format.json { head :no_content }\n end\n end",
"def test_stop_following_a_user\r\n # delete \"/follows/destroy.json\", :api_key => 'testapikey',\r\n # :followee_id => 3\r\n # assert_response :success\r\n end",
"def destroy\n @k_measurement.destroy\n respond_to do |format|\n format.html { redirect_to k_measurements_url }\n format.json { head :no_content }\n end\n end",
"def do_delete(uri = \"\")\n @connection.delete do |req|\n req.url uri\n req.headers['Content-Type'] = 'application/json'\n end\n end",
"def delete_and_give_me_a_json(additional_path, params = nil)\n if self.service_base_path != nil\n if params == nil\n params = Hash.new\n end\n params[:api_key] = self.access_token\n message = self.http_client.delete \"#{self.base_url}#{self.service_base_path}/#{additional_path}.json\", params\n trata_erro(message.content)\n end\n end",
"def destroy\n @loud_check = LoudCheck.find(params[:id])\n @loud_check.destroy\n\n respond_to do |format|\n format.html { redirect_to loud_checks_url }\n format.json { head :no_content }\n end\n end",
"def delete_kit(id)\n resp = make_request :delete, \"kits/#{id}\"\n check_response_for_field resp, \"ok\"\n end",
"def destroy\n @getoff_info = GetoffInfo.find(params[:id])\n @getoff_info.destroy\n\n respond_to do |format|\n format.html { redirect_to getoff_infos_url }\n format.json { head :no_content }\n end\n end",
"def api_delete(path, data = {})\n api_request(:delete, path, :data => data)\n end",
"def destroy\n @kit = Kit.find(params[:id])\n @kit.destroy\n\n respond_to do |format|\n format.html { redirect_to kits_url }\n format.json { head :ok }\n end\n end",
"def destroy\n return if new_record?\n \n @api.delete \"/items/#{shortcode_url}.json\"\n end",
"def destroy\n @coin_set = CoinSet.find(params[:id])\n @coin_set.destroy\n\n respond_to do |format|\n format.html { redirect_to coin_sets_url }\n format.json { head :ok }\n end\n end",
"def destroy\n\n\t\turi = URI.parse(Counter::Application.config.simplyurl)\n\t\thttp = Net::HTTP.new(uri.host, uri.port)\n\t\t\n\t\trequest = Net::HTTP::Delete.new('/offsets/doit.json')\n\t\tputs params\n\t\tputs params.slice(*['custids','acctids'])\n\t\t\n\t\t# ok, this join stuff is bogus - it encodes properly, but the other side only sees the last element and loses the array type - it's just string\n\t\t# this way, i 'split' it at the other side to recover my array\n\t\t# it should work without the join/split crap, but it doesn't\n\t\trequest.set_form_data({:custids => ( params['custids'] || []).join(','), :acctids => ( params['acctids'] || []).join(','), :itemids => ( params['itemids'] || []).join(',')})\n\t\t\n\t\tputs request.body\n\t\t\n\t\tresponse = http.request(request)\n\t\tputs response.body\n\n respond_to do |format|\n format.html { render :text => response.code == :ok ? \"\" : response.body, status: response.code }\n format.json { render :text => response.code == :ok ? \"\" : response.body, status: response.code }\n end\n end",
"def destroy\n @shot = Shot.find(params[:id])\n @shot.destroy\n\n respond_to do |format|\n format.html { redirect_to(shots_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @storyline = Storyline.find(params[:id])\n @storyline.destroy\n\n respond_to do |format|\n format.html { redirect_to storylines_url }\n format.json { head :no_content }\n end\n end",
"def delete\n client.delete(url)\n @deleted = true\n end",
"def destroy_rest\n @item_usage = ItemUsage.find(params[:id])\n @item_usage.destroy\n\n respond_to do |format|\n format.html { redirect_to(item_usages_url) }\n format.xml { head :ok }\n end\n end",
"def delete datapoints\n datapoints = [*datapoints]\n datapoints.each do |dp|\n @user.delete \"/users/me/goals/#{@slug}/datapoints/#{dp.id}.json\"\n end\n end",
"def destroy\n @daily_intake.destroy\n respond_to do |format|\n format.html { redirect_to daily_intakes_url }\n format.json { head :no_content }\n end\n end",
"def deleteEntityTestimonial( entity_id, gen_id)\n params = Hash.new\n params['entity_id'] = entity_id\n params['gen_id'] = gen_id\n return doCurl(\"delete\",\"/entity/testimonial\",params)\n end",
"def destroy\n @click_thru = ClickThru.find(params[:id])\n @click_thru.destroy\n\n respond_to do |format|\n format.html { redirect_to click_thrus_url }\n format.json { head :no_content }\n end\n end",
"def delete_demo(id)\n delete_record \"/demos/#{id}\"\n end",
"def destroy\n @hot_water_demand = HotWaterDemand.find(params[:id])\n @hot_water_demand.destroy\n\n respond_to do |format|\n format.html { redirect_to hot_water_demands_url }\n format.json { head :no_content }\n end\n end",
"def delete(*args)\n Request.delete(*args)\n end",
"def destroy\n @livestock = Livestock.find(params[:id])\n @livestock.destroy\n\n respond_to do |format|\n format.html { redirect_to livestocks_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @one_time_stop = OneTimeStop.find(params[:id])\n @one_time_stop.destroy\n\n respond_to do |format|\n format.html { redirect_to one_time_stops_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sculpture.destroy\n respond_to do |format|\n format.html { redirect_to sculptures_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @qa_client = QaClient.find(params[:id])\n @qa_client.destroy\n\n respond_to do |format|\n format.html { redirect_to qa_clients_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @food_intake_log = FoodIntakeLog.find(params[:id])\n @food_intake_log.destroy\n\n respond_to do |format|\n format.html { redirect_to food_intake_logs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @datapoint.destroy\n respond_to do |format|\n format.html { redirect_to datapoints_url }\n format.json { head :no_content }\n end\n end",
"def delete(resource)\n headers = base_headers.merge('Content-Type' => 'application/json')\n url = \"#{@base_url}/#{resource}\"\n\n @logger.debug(\"DELETE request Url: #{url}\")\n @logger.debug(\"-- Headers: #{headers}\")\n\n x = HTTParty.delete(url, headers: headers)\n puts x.inspect\n x\n end",
"def destroy\n url = 'https://casa-core.herokuapp.com/api/units/' + params[:id]\n response = HTTParty.delete(url, :headers => { \"Authorization\" => AUTH, \"Host\" => HOST})\n redirect_to units_url, notice: 'Unit was successfully deleted.'\n end",
"def destroy\n @wait_time = WaitTime.find(params[:id])\n @wait_time.destroy\n\n respond_to do |format|\n format.html { redirect_to wait_times_url }\n format.json { head :no_content }\n end\n end",
"def delete(url, headers = {})\n http :delete, \"#{url}.json\", headers\n end"
] | [
"0.6665254",
"0.6639267",
"0.64180845",
"0.6404461",
"0.63492167",
"0.63488585",
"0.62898624",
"0.6282538",
"0.6251904",
"0.6247144",
"0.62224615",
"0.6197994",
"0.61745656",
"0.6173084",
"0.61641586",
"0.61477196",
"0.61431575",
"0.6116767",
"0.6110208",
"0.6104028",
"0.6102937",
"0.6095258",
"0.6090218",
"0.6090218",
"0.6090218",
"0.6090218",
"0.6084143",
"0.60762",
"0.60650635",
"0.6050849",
"0.6049528",
"0.6039022",
"0.6038933",
"0.6035024",
"0.6033226",
"0.60306007",
"0.6027666",
"0.6027092",
"0.6025828",
"0.60162723",
"0.60153836",
"0.6014766",
"0.6012666",
"0.6012666",
"0.6009979",
"0.600788",
"0.6006255",
"0.60052013",
"0.6004207",
"0.6002729",
"0.59929097",
"0.59702694",
"0.59689754",
"0.5968652",
"0.596726",
"0.59616613",
"0.5953232",
"0.59486645",
"0.5946706",
"0.5935498",
"0.5934445",
"0.59333944",
"0.59299564",
"0.59282637",
"0.5928221",
"0.5926242",
"0.59249437",
"0.5922972",
"0.5922775",
"0.59215766",
"0.592118",
"0.59175605",
"0.59154147",
"0.5911922",
"0.5907046",
"0.59044856",
"0.5897286",
"0.58955115",
"0.58951116",
"0.58886975",
"0.5886563",
"0.5884173",
"0.5880288",
"0.587904",
"0.58781105",
"0.5878071",
"0.5877182",
"0.5875262",
"0.58746606",
"0.58738387",
"0.5869696",
"0.586828",
"0.5867315",
"0.5865261",
"0.5861739",
"0.5860193",
"0.5857438",
"0.5855582",
"0.58544147",
"0.5850951"
] | 0.713365 | 0 |
Use callbacks to share common setup or constraints between actions. | def set_qx_take_off
@qx_take_off = Qx::TakeOff.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def qx_take_off_params
params.require(:qx_take_off).permit(:airport_id, :runway, :aircraft_type, :hirl_rcls, :hirl, :hirl_rcls_stop, :lights)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n end",
"def param_whitelist\n [:role, :title]\n end",
"def expected_permitted_parameter_names; end",
"def safe_params\n params.except(:host, :port, :protocol).permit!\n end",
"def strong_params\n params.require(:team_member).permit(param_whitelist)\n end",
"def permitir_parametros\n \t\tparams.permit!\n \tend",
"def strong_params\n params.require(:community).permit(param_whitelist)\n end",
"def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end",
"def strong_params\n params.require(:education).permit(param_whitelist)\n end",
"def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end",
"def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end",
"def param_whitelist\n [:rating, :review]\n end",
"def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end",
"def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end",
"def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end",
"def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end",
"def valid_params_request?; end",
"def strong_params\n params.require(:experience).permit(param_whitelist)\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end",
"def allowed_params\n params.require(:allowed).permit(:email)\n end",
"def permitted_params\n []\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end",
"def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend",
"def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end",
"def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end",
"def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end",
"def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end",
"def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end",
"def safe_params\n params.require(:user).permit(:name)\n end",
"def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend",
"def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end",
"def check_params; true; end",
"def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end",
"def quote_params\n params.permit!\n end",
"def valid_params?; end",
"def paramunold_params\n params.require(:paramunold).permit!\n end",
"def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend",
"def filtered_parameters; end",
"def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end",
"def filtering_params\n params.permit(:email, :name)\n end",
"def check_params\n true\n end",
"def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend",
"def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend",
"def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end",
"def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end",
"def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end",
"def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend",
"def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end",
"def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end",
"def active_code_params\n params[:active_code].permit\n end",
"def filtering_params\n params.permit(:email)\n end",
"def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end",
"def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end",
"def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end",
"def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end",
"def post_params\n if current_user.admin? \n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end",
"def list_params\n params.permit(:name)\n end",
"def filter_parameters; end",
"def filter_parameters; end",
"def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end",
"def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end",
"def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end",
"def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end",
"def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end",
"def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end",
"def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end",
"def url_whitelist; end",
"def admin_social_network_params\n params.require(:social_network).permit!\n end",
"def filter_params\n params.require(:filters).permit(:letters)\n end",
"def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end",
"def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end",
"def sensitive_params=(params)\n @sensitive_params = params\n end",
"def permit_request_params\n params.permit(:address)\n end",
"def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end",
"def secure_params\n params.require(:location).permit(:name)\n end",
"def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end",
"def question_params\n params.require(:survey_question).permit(question_whitelist)\n end",
"def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end",
"def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end",
"def maintenance_request_params\n params[:maintenance_request].permit! #allow all parameters for now\n end",
"def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end",
"def url_params\n params[:url].permit(:full)\n end",
"def backend_user_params\n params.permit!\n end",
"def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend",
"def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end",
"def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end",
"def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end",
"def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end",
"def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end",
"def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end",
"def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end",
"def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end"
] | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",
"0.62894756",
"0.6283177",
"0.6242471",
"0.62382483",
"0.6217549",
"0.6214457",
"0.6209053",
"0.6193042",
"0.6177802",
"0.6174604",
"0.61714715",
"0.6161512",
"0.6151757",
"0.6150663",
"0.61461",
"0.61213595",
"0.611406",
"0.6106206",
"0.6105114",
"0.6089039",
"0.6081015",
"0.6071004",
"0.60620916",
"0.6019971",
"0.601788",
"0.6011056",
"0.6010898",
"0.6005122",
"0.6005122",
"0.6001556",
"0.6001049",
"0.59943926",
"0.5992201",
"0.59909594",
"0.5990628",
"0.5980841",
"0.59669393",
"0.59589154",
"0.5958826",
"0.5957911",
"0.5957385",
"0.5953072",
"0.59526145",
"0.5943361",
"0.59386164",
"0.59375334",
"0.59375334",
"0.5933856",
"0.59292704",
"0.59254247",
"0.5924164",
"0.59167904",
"0.59088355",
"0.5907542",
"0.59064597",
"0.5906243",
"0.5898226",
"0.589687",
"0.5896091",
"0.5894501",
"0.5894289",
"0.5891739",
"0.58860534",
"0.5882406",
"0.587974",
"0.58738774",
"0.5869024",
"0.58679986",
"0.5867561",
"0.5865932",
"0.5864461",
"0.58639693",
"0.58617616",
"0.5861436",
"0.5860451",
"0.58602303",
"0.5854586",
"0.58537364",
"0.5850427",
"0.5850199"
] | 0.0 | -1 |
Only allow a trusted parameter "white list" through. | def slot_params
params.require(:slot).permit(
:name,
:locked,
:order,
:user_id,
)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def allowed_params\n ALLOWED_PARAMS\n end",
"def expected_permitted_parameter_names; end",
"def param_whitelist\n [:role, :title]\n end",
"def default_param_whitelist\n [\"mode\"]\n end",
"def permitir_parametros\n \t\tparams.permit!\n \tend",
"def permitted_params\n []\n end",
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def filtered_parameters; end",
"def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end",
"def parameters_list_params\n params.require(:parameters_list).permit(:name, :description, :is_user_specific)\n end",
"def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end",
"def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end",
"def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end",
"def param_whitelist\n [:rating, :review]\n end",
"def valid_params?; end",
"def permitted_params\n declared(params, include_missing: false)\n end",
"def permitted_params\n declared(params, include_missing: false)\n end",
"def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend",
"def filter_parameters; end",
"def filter_parameters; end",
"def strong_params\n params.require(:team_member).permit(param_whitelist)\n end",
"def strong_params\n params.require(:community).permit(param_whitelist)\n end",
"def check_params; true; end",
"def valid_params_request?; end",
"def strong_params\n params.require(:experience).permit(param_whitelist)\n end",
"def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end",
"def list_params\n params.permit(:name)\n end",
"def check_params\n true\n end",
"def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end",
"def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end",
"def additional_permitted_params\n []\n end",
"def strong_params\n params.require(:education).permit(param_whitelist)\n end",
"def resource_params\n params[resource_singular_name].try(:permit, self.class.param_whitelist)\n end",
"def allow_params_authentication!; end",
"def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end",
"def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end",
"def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end",
"def paramunold_params\n params.require(:paramunold).permit!\n end",
"def param_params\n params.require(:param).permit(:param_category_id, :param_table_id, :name, :english_name, :weighting, :description)\n end",
"def quote_params\n params.permit!\n end",
"def list_params\n params.permit(:list_name)\n end",
"def allowed_params(parameters)\n parameters.select do |name, values|\n values.location != \"path\"\n end\n end",
"def all_params; end",
"def permitted_resource_params\n params[resource.object_name].present? ? params.require(resource.object_name).permit! : ActionController::Parameters.new\n end",
"def source_params\n params.require(:source).permit(all_allowed_params)\n end",
"def user_params\n end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def params; end",
"def get_allowed_parameters\n return _get_specific_action_config(:allowed_action_parameters, :allowed_parameters)&.map(&:to_s)\n end",
"def permitted_params\n @wfd_edit_parameters\n end",
"def user_params\r\n end",
"def param_whitelist\n whitelist = [\n :comment,\n :old_progress, :new_progress,\n :metric_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:metric_id)\n end\n \n whitelist\n end",
"def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend",
"def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end",
"def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend",
"def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end",
"def get_params\n\t\t\n\t\treturn ActionController::Parameters.new(self.attributes).permit(:first_name, :last_name, :email, :provider)\n\n\tend",
"def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end",
"def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end",
"def valid_parameters\n sort_symbols(@interface.allowed_parameters)\n end",
"def params_permit\n params.permit(:id)\n end",
"def allowed_params\n params.require(:allowed).permit(:email)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def filter_params\n params.permit(*resource_filter_permitted_params)\n end",
"def community_params\n params.permit(:profile_image, :name, :description, :privacy_type, :viewed_by, {tags: []}, {features: []}, {admins: []}, :members, :location, :beacon, :creator, :ambassadors, :current_events, :past_events, :feed, :category, :address, :allow_member_post_to_feed, :allow_member_post_to_events)\n end",
"def specialty_params\n\t\tparams.require(:specialty).permit(*Specialty::DEFAULT_ACCESSIBLE_ATTRIBUTES)\n\tend",
"def authorize_params\n super.tap do |params|\n %w[display scope auth_type].each do |v|\n if request.params[v]\n params[v.to_sym] = request.params[v]\n end\n end\n end\n end",
"def feature_params_filter\n params.require(:feature).permit(:name, :cat, :lower, :upper, :opts, :category, :description, :company, :active, :unit, :icon)\n end",
"def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end",
"def argument_params\n params.require(:argument).permit(:name)\n end",
"def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end",
"def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end",
"def property_params\n params.permit(:name, :is_available, :is_approved, :owner_id)\n end",
"def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end",
"def sponsor_params\n params.require(:sponsor).permit(WHITE_LIST)\n end",
"def whitelist_person_params\n params.require(:person).permit(:family, :pre_title, :given_name, :dates, :post_title, :epithet, :dates_of_office, same_as: [], related_authority: [], altlabel: [], note: []) # Note - arrays need to go at the end or an error occurs!\n end",
"def parameters\n nil\n end",
"def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end",
"def sequence_param_whitelist\n default_param_whitelist << \"show_index\"\n end",
"def resource_filter_permitted_params\n raise(NotImplementedError, 'resource_filter_permitted_params method not implemented')\n end",
"def normal_params\n reject{|param, val| param_definitions[param][:internal] }\n end",
"def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end",
"def special_device_list_params\n params.require(:special_device_list).permit(:name)\n end",
"def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end"
] | [
"0.7121987",
"0.70541996",
"0.69483954",
"0.6902367",
"0.6733912",
"0.6717838",
"0.6687021",
"0.6676254",
"0.66612333",
"0.6555296",
"0.6527056",
"0.6456324",
"0.6450841",
"0.6450127",
"0.6447226",
"0.6434961",
"0.64121825",
"0.64121825",
"0.63913447",
"0.63804525",
"0.63804525",
"0.6373396",
"0.6360051",
"0.6355191",
"0.62856233",
"0.627813",
"0.62451434",
"0.6228103",
"0.6224965",
"0.6222941",
"0.6210244",
"0.62077755",
"0.61762565",
"0.61711127",
"0.6168448",
"0.6160164",
"0.61446255",
"0.6134175",
"0.6120522",
"0.6106709",
"0.60981655",
"0.6076113",
"0.60534036",
"0.60410434",
"0.6034582",
"0.6029977",
"0.6019861",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.6019158",
"0.60184896",
"0.60157263",
"0.6005857",
"0.6003803",
"0.60012573",
"0.59955895",
"0.5994598",
"0.5993604",
"0.5983824",
"0.5983166",
"0.5977431",
"0.597591",
"0.5968824",
"0.5965953",
"0.59647584",
"0.59647584",
"0.59566855",
"0.59506303",
"0.5950375",
"0.59485626",
"0.59440875",
"0.5930872",
"0.5930206",
"0.5925668",
"0.59235454",
"0.5917905",
"0.59164816",
"0.5913821",
"0.59128743",
"0.5906617",
"0.59053683",
"0.59052664",
"0.5901591",
"0.58987755",
"0.5897456",
"0.58970183",
"0.58942604"
] | 0.0 | -1 |
GET /users GET /users.json | def index
@users = User.all
pagination
respond_to do |format|
format.html # index.html.erb
format.json { render json: @users }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def users(args = {})\n get(\"/users.json\",args)\n end",
"def show\n begin\n user = User.find(params[:user_id])\n render json: { users: user }, status: :ok\n rescue => e\n render json: { errors: e.message}, status: 404\n end\n end",
"def GetUsers params = {}\n\n params = params.merge(path: 'users.json')\n APICall(params)\n\n end",
"def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/users/#{params[:id]}.json\"\n\t response = RestClient.get(url)\n\t @user = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/users.json\"\t \n response = RestClient.get(url)\n @users = JSON.parse(response.body)\t\t \n\tend\n end",
"def list_users\n self.class.get('/users')\n end",
"def users\n get('get_users')\n end",
"def index\n users = User.all\n json_response(users)\n end",
"def show\n @users = User.all\n json_response(@users)\n end",
"def list\r\n users = User.all\r\n render json: users\r\n end",
"def show\n @users = User.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @users }\n end\n end",
"def get \n render :json => User.find(params[:id])\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n users = User.all\n render json: { users: users }, status: :ok\n end",
"def index\r\n users = User.all\r\n render json: users\r\n end",
"def users(params = {})\n params.merge!(key: 'users')\n objects_from_response(Code42::User, :get, 'user', params)\n end",
"def index\n users = User.all\n render json: users\n end",
"def index\n users = User.all\n render json: users\n end",
"def index\n users = User.all\n render json: users\n end",
"def index\n users = User.all\n render json: users\n end",
"def users(params = {})\n make_get_request('/account/users', params)\n end",
"def index\n users = User.all\n render json: users \n end",
"def index\n users = User.all\n\n render json: users, each_serializer: Api::V1::UsersSerializer\n end",
"def index\n user= User.all\n render json: {users:user}\n end",
"def index\n @users = User.all\n render json: @users, status: :ok\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n json_response(User.all) \n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def user_info\n @user = @github.users.get user: params[:username]\n render json: Hash[@user]\n end",
"def index\n users = User.all \n render json: users \n end",
"def index\n\t\t# specifying json format in the URl\n\t uri = \"#{API_BASE_URL}/users.json\"\n\t # It will create new rest-client resource so that we can call different methods of it\n\t rest_resource = RestClient::Resource.new(uri, USERNAME, PASSWORD)\n\n\t # this next line will give you back all the details in json format, \n\t #but it will be wrapped as a string, so we will parse it in the next step.\n\t users = rest_resource.get \n\n\t # we will convert the return data into an array of hash. see json data parsing here\n\t @users = JSON.parse(users, :symbolize_names => true)\n\tend",
"def index\n\t\t@users = User.all\n\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.json { render json: @users.map(&:as_json) }\n\t\tend\n\tend",
"def list\n render json: User.all\n end",
"def index\n @users = User.all\n render json: @users, status: :ok\n end",
"def user\n render :json=> User.find(params[:id])\n end",
"def index\n\n users = User.all \n render json: users\n\n end",
"def show\n render json: Users.find(params[\"id\"])\n end",
"def GetUser id\n\n APICall(path: \"users/#{id}.json\")\n\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html\n format.json { render json: @users }\n end\n end",
"def show\n @users = User.find(params[:id])\n if @users\n respond_to do |format|\n format.json { render :json => @users }\n format.xml { render :xml => @users }\n end\n else\n head :not_found\n end\n end",
"def index\n \t@users = User.all\n\n respond_to do |format| \n format.json { render json: @users }\n end\n end",
"def list\n get('users')['users']\n end",
"def index\n render ActiveModelSerializers::SerializableResource.new(@users,\n each_serializer: UserSerializer\n ).to_json, status: 200\n end",
"def index\n @users = User.all \n render json: @users, status: :ok \n end",
"def index\n @users = User.all\n logger.debug(\"user index\")\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n render json: User.all\n end",
"def index\n @users = User.order_by(last_name: :desc)\n if @users\n render json: Oj.dump(json_for(@users, include: ['phones', 'cards'], meta: meta), mode: :compat)\n else\n return head :unauthorized\n end\n end",
"def users(params = {})\n response = get('users/lookup.json', params)\n response.map {|user| Croudia::Object::User.new(user) }\n end",
"def index\n render json: User.all\n end",
"def index\n render json: User.all\n end",
"def show\n user = User.find(params[:id])\n render json: @user\nend",
"def list_users(user_id)\n self.class.get(\"/users/#{user_id}\")\n end",
"def show\n user = User.find(params[:id])\n render json: user\n end",
"def index\n\t\t@users = User.all\n\n\t\trespond_to do |format|\n\t\t format.html # index.html.erb\n\t\t format.json { render json: @users }\n\t\tend\n\tend",
"def index\n @users = User.all(limit: 100)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users.as_json(user: current_user) }\n end\n end",
"def get_users\r\n # Prepare query url.\r\n _path_url = '/users'\r\n _query_builder = Configuration.get_base_uri\r\n _query_builder << _path_url\r\n _query_url = APIHelper.clean_url _query_builder\r\n # Prepare headers.\r\n _headers = {\r\n 'accept' => 'application/json'\r\n }\r\n # Prepare and execute HttpRequest.\r\n _request = @http_client.get(\r\n _query_url,\r\n headers: _headers\r\n )\r\n CustomHeaderAuth.apply(_request)\r\n _context = execute_request(_request)\r\n validate_response(_context)\r\n # Return appropriate response type.\r\n decoded = APIHelper.json_deserialize(_context.response.raw_body)\r\n decoded.map { |element| User.from_hash(element) }\r\n end",
"def show\n # When a http GET request to '/users/1' is received, have it show,\n # in json format, user 1's information.\n @id = params[:id]\n @user = User.find(@id)\n render json: @user\n end",
"def show\n user = User.find(params[:id])\n\n render json: user\n end",
"def index \n render json: User.all\n end",
"def index\n @myusers = Myuser.all\n\n render json: @myusers\n end",
"def index\n\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def query_users(options={}) path = \"/api/v2/users\"\n get(path, options, AvaTax::VERSION) end",
"def list\n response = @client.get(\"/users\")\n response[\"users\"].map {|u| User.new(@client, u) }\n end",
"def users\n\t\trespond_with User.all\n\tend",
"def index\n @users = User.all\n\n respond_with do |format|\n format.json do\n render json: @users,\n each_serializer: Api::UserSerializer,\n root: 'users'\n end\n end\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n @users = User.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end"
] | [
"0.82109934",
"0.7873764",
"0.7860689",
"0.78108346",
"0.78067017",
"0.7678852",
"0.76586664",
"0.76318866",
"0.7582366",
"0.75291824",
"0.7487637",
"0.74485743",
"0.7439024",
"0.7437192",
"0.7427442",
"0.73978853",
"0.73978853",
"0.73978853",
"0.73978853",
"0.7377353",
"0.7372414",
"0.736885",
"0.7368531",
"0.7367068",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7358582",
"0.7351495",
"0.7350187",
"0.7350187",
"0.7350187",
"0.7350187",
"0.7350187",
"0.7350187",
"0.73463756",
"0.73426867",
"0.7331111",
"0.73231107",
"0.73227614",
"0.73126787",
"0.7295692",
"0.7274169",
"0.7265484",
"0.72624177",
"0.72607577",
"0.722517",
"0.72189873",
"0.71941674",
"0.71883225",
"0.7187108",
"0.71815044",
"0.717089",
"0.71695215",
"0.7156781",
"0.71546155",
"0.71546155",
"0.7140691",
"0.7135879",
"0.7134857",
"0.71316093",
"0.71315825",
"0.712011",
"0.7114429",
"0.7112858",
"0.7107888",
"0.7098051",
"0.70957917",
"0.70957917",
"0.7093039",
"0.70904744",
"0.70890427",
"0.70889443",
"0.7085115",
"0.7085115",
"0.7085115",
"0.7085115",
"0.7085115",
"0.7085115",
"0.7085115",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685",
"0.7081685"
] | 0.0 | -1 |
GET /users/1 GET /users/1.json | def show
if @oeuser.id.to_i == params[:id].to_i
@user = User.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @user }
end
else
flash[:error] = "Restricted Access, You need to be Admin"
redirect_to root_url
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/users/#{params[:id]}.json\"\n\t response = RestClient.get(url)\n\t @user = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/users.json\"\t \n response = RestClient.get(url)\n @users = JSON.parse(response.body)\t\t \n\tend\n end",
"def get \n render :json => User.find(params[:id])\n end",
"def GetUser id\n\n APICall(path: \"users/#{id}.json\")\n\n end",
"def show\n begin\n user = User.find(params[:user_id])\n render json: { users: user }, status: :ok\n rescue => e\n render json: { errors: e.message}, status: 404\n end\n end",
"def users(args = {})\n get(\"/users.json\",args)\n end",
"def show\n # When a http GET request to '/users/1' is received, have it show,\n # in json format, user 1's information.\n @id = params[:id]\n @user = User.find(@id)\n render json: @user\n end",
"def user\n render :json=> User.find(params[:id])\n end",
"def fetch_one_user_data\n get_url(\"/api/v1/users/#{@filter}\")\n end",
"def show\n user = User.find(params[:id])\n render json: @user\nend",
"def show\n user = User.find(params[:id])\n render json: user\n end",
"def show\n user = User.find(params[:id])\n\n render json: user\n end",
"def show\n render json: Users.find(params[\"id\"])\n end",
"def show\n user = User.find(params[:id])\n render json: user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n @user = User.find(params[:id])\n\n render json: @user\n end",
"def show\n user = User.select(:id, :username, :email).find(params[:id])\n render :json => user\n end",
"def show\n render json: User.find(params[\"id\"])\n end",
"def show\n @users = User.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @users }\n end\n end",
"def show\n @user = User.find(params[:id])\n render json: @user\nend",
"def user_info\n @user = @github.users.get user: params[:username]\n render json: Hash[@user]\n end",
"def show\n render json: User.find(params[:id])\n end",
"def show\n @user = User.find(params[:id])\n render json:@user\n end",
"def show\n @user = User.find(params[:id])\n render json:@user\n end",
"def get_by_id\n \n # the user_id param comes from our route\n user = User.find(params[:user_id])\n \n if user\n render json: user, status: :ok\n else\n render json: { errors: 'User not found' }, status: :not_found\n end\n end",
"def GetUsers params = {}\n\n params = params.merge(path: 'users.json')\n APICall(params)\n\n end",
"def get_user_details\n @user = User.find_by_id(params[:user_id])\n render json: @user\n end",
"def show\n render json: User.find(params[:id])\n end",
"def show\n user = User.find_by(id: params[:id])\n render json: user, status: :ok\n end",
"def user(id)\n self.class.get(\"/user/#{id}\", @options).parsed_response\n end",
"def show\n @user = User.find(params[:id])\n render json: {user: @user}\n end",
"def list_users\n self.class.get('/users')\n end",
"def show\n user = User.find(params[:id])\n render json: user\n end",
"def show\n user = User.friendly.find(params[:user_id]) \n render json: user\n end",
"def show\n render :json => User.find(params[:id])\n end",
"def show(id)\n response = request(:get, \"/users/#{id}.json\")\n response[\"user\"]\n end",
"def index\n users = User.all\n json_response(users)\n end",
"def show\n @user = ActiveRecord::Base.connection.execute(\"\n SELECT * \n FROM users \n WHERE username = '#{params[:username].downcase}' \n LIMIT 1\").first\n\n respond_to do |format|\n format.html\n format.json {render json: User.find(@user[0])}\n end\n end",
"def show(id)\n response = request(:get, \"/users/#{id}.json\")\n response.first[1]\n end",
"def show\n @users = User.all\n json_response(@users)\n end",
"def index\n json_response(User.all) \n end",
"def get(user_id:)\n path = '/users/{userId}'\n .gsub('{userId}', user_id)\n\n if user_id.nil?\n raise Appwrite::Exception.new('Missing required parameter: \"userId\"')\n end\n\n params = {\n }\n \n headers = {\n \"content-type\": 'application/json',\n }\n\n @client.call(\n method: 'GET',\n path: path,\n headers: headers,\n params: params,\n response_type: Models::User\n )\n end",
"def index\n users = User.all\n render json: { users: users }, status: :ok\n end",
"def show\n # @user = User.first\n user = User.find(params[:id])\n render json: user\n end",
"def user(user_id, params = {})\n make_get_request(\"/users/#{user_id}\", params)\n end",
"def show_user_profile\n @user = User.find(username: params[:username])\n render json: @user\n end",
"def user(id = nil)\n id.to_i.zero? ? get('/user') : get(\"/users/#{id}\")\n end",
"def get_user id, options={}, headers={}\n @connection.get \"users/#{id}.json\", options, headers\n end",
"def user(user=nil)\n if user\n get(\"/users/#{user}\", {}, 3)\n else\n get(\"/user\", {}, 3)\n end\n end",
"def index\n \n @user = User.find(current_user.id) \n\n respond_to do |format|\n format.html { render action: \"show\" }\n format.json { render json: @user }\n end\n end",
"def show\n @user = User.find(params[:id])\n\n respond_to do |format|\n format.html\n format.json { render json: @user }\n end\n end",
"def get_user(user_id:)\n parse(JSON.parse(connection.get(\"users/#{user_id}\").body))\n end",
"def index\n user= User.all\n render json: {users:user}\n end",
"def index\r\n users = User.all\r\n render json: users\r\n end",
"def show\n # puts params[:id]\n render json: User.find(params[:id])\n end",
"def get_user_info\n id = params[\"id\"]\n error_list = []\n status = 1\n json_response = {}\n user = User.find_by(id: id)\n\n if user.nil?\n error_list.append(\"Error: The specified user doesn't exist.\")\n status = -1\n else\n json_response[\"user\"] = user.get_user_json_data\n end\n\n if status == -1\n json_response[\"errors\"] = error_list\n end\n\n json_response[\"status\"] = status\n\n # Format the json_response into proper JSON and respond with it\n json_response = json_response.to_json\n\n respond_to do |format|\n format.json { render json: json_response }\n end\n end",
"def show\n @user = User.find(params[:id])\n if @user\n render json: {\n user: @user\n }\n else\n render json: {\n status: 500,\n errors: ['user not found']\n }\n end\n end",
"def index\n users = User.all\n render json: users\n end",
"def index\n users = User.all\n render json: users\n end",
"def index\n users = User.all\n render json: users\n end",
"def index\n users = User.all\n render json: users\n end",
"def show\n @user = User.find(params[:id])\n render json: {\n username: @user.username,\n first_name: @user.first_name,\n last_name: @user.last_name,\n email: @user.email,\n phone_number: @user.phone_number,\n contacts: @user.contacts\n }, status: :ok\n end",
"def get_user(user_id)\n request(Route.new(:GET, '/users/%{user_id}', user_id: user_id))\n end",
"def show\n @user = User.find(params[:id])\n render 'api/v1/users/show'\n end",
"def index\n users = User.all\n\n render json: users, each_serializer: Api::V1::UsersSerializer\n end",
"def index\n users = User.all\n render json: users \n end",
"def user(user_id)\n params = {\n :client_id => Swiftype.platform_client_id,\n :client_secret => Swiftype.platform_client_secret\n }\n get(\"users/#{user_id}.json\", params)\n end",
"def index\n users = User.all \n render json: users \n end",
"def list\r\n users = User.all\r\n render json: users\r\n end",
"def json_show_user_profile_by_user_id\n @user = User.find(params[:user_id])\n\n respond_to do |format|\n format.json { render json: @user.as_json(only:[:email,:username]) }\n end\n end",
"def index\n\t\t# specifying json format in the URl\n\t uri = \"#{API_BASE_URL}/users.json\"\n\t # It will create new rest-client resource so that we can call different methods of it\n\t rest_resource = RestClient::Resource.new(uri, USERNAME, PASSWORD)\n\n\t # this next line will give you back all the details in json format, \n\t #but it will be wrapped as a string, so we will parse it in the next step.\n\t users = rest_resource.get \n\n\t # we will convert the return data into an array of hash. see json data parsing here\n\t @users = JSON.parse(users, :symbolize_names => true)\n\tend",
"def show\n user = User.find_by(uid: params[:id])\n if user\n puts 'USER FOUND'\n render json: user\n else\n puts 'NO USER'\n render json: 'no user'.to_json\n end\n end",
"def show\n render json: UserService.get_user(params[:id]), includes: 'questions, answers'\n end",
"def index\n @users = User.all(limit: 100)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users.as_json(user: current_user) }\n end\n end",
"def index\n render :json => User.all, status: 200\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users\n end",
"def index\n @users = User.all\n render json: @users, status: :ok\n end",
"def show\n @users = User.find(params[:id])\n if @users\n respond_to do |format|\n format.json { render :json => @users }\n format.xml { render :xml => @users }\n end\n else\n head :not_found\n end\n end",
"def index\n @users = User.all\n\n render json: @users\n end",
"def index\n @users = User.all\n\n render json: @users\n end"
] | [
"0.81046426",
"0.7703556",
"0.77011716",
"0.76262826",
"0.7582106",
"0.74818",
"0.7461394",
"0.7446168",
"0.730656",
"0.7300699",
"0.72902125",
"0.72781444",
"0.72358584",
"0.72335744",
"0.72335744",
"0.72335744",
"0.72335744",
"0.72335744",
"0.72335744",
"0.72335744",
"0.7225407",
"0.7225407",
"0.7225407",
"0.7225407",
"0.7225407",
"0.7225407",
"0.7225407",
"0.7225407",
"0.72222257",
"0.72165024",
"0.72137505",
"0.72096044",
"0.71930283",
"0.7182953",
"0.7182144",
"0.7182144",
"0.7180289",
"0.71750754",
"0.7173851",
"0.71640617",
"0.71636444",
"0.71453786",
"0.7145053",
"0.7129776",
"0.71256554",
"0.71160513",
"0.7095665",
"0.70941204",
"0.70772994",
"0.7070785",
"0.7070607",
"0.7063351",
"0.70552826",
"0.7025071",
"0.7014598",
"0.70047677",
"0.6998373",
"0.69910055",
"0.6984177",
"0.6979766",
"0.6972448",
"0.6972228",
"0.6968384",
"0.69666255",
"0.6956339",
"0.69506294",
"0.6945614",
"0.6943135",
"0.69351804",
"0.6932212",
"0.6932212",
"0.6932212",
"0.6932212",
"0.6927094",
"0.69255126",
"0.6925136",
"0.6917375",
"0.6907744",
"0.68947464",
"0.6882589",
"0.6875701",
"0.68749416",
"0.68633634",
"0.6861618",
"0.6858055",
"0.6855495",
"0.68530583",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.685255",
"0.6849599",
"0.6847195",
"0.6847074",
"0.6847074"
] | 0.0 | -1 |
GET /users/new GET /users/new.json | def new
@user = User.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @user }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @newuser = Newuser.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @newuser }\n end\n end",
"def new\n @usernew = Usernew.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @usernew }\n end\n end",
"def new\n @user = user.new\n\t\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n # When a http GET request to '/users/new' is received, have it render:\n # a view file with an empty form to create a new user.\n end",
"def new\n @user = User.new\n @action = \"new\"\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n @users = User.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @users }\n end\n end",
"def new2\n @user = User.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n \n @user = User.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n \n end",
"def new\n @user = User.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n @user = User.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n @user = User.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n @user = User.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end",
"def new\n @user = User.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user }\n end\n end"
] | [
"0.8287397",
"0.8169197",
"0.8155916",
"0.80483407",
"0.8022376",
"0.8021751",
"0.8009459",
"0.7950995",
"0.793078",
"0.793078",
"0.7873476",
"0.7873476",
"0.7873476"
] | 0.7860956 | 90 |
POST /users POST /users.json | def create
@user = User.new(params[:user])
respond_to do |format|
if @user.save
UserMailer.welcome_email(@user).deliver
format.html { redirect_to @user, notice: 'User was successfully created.' }
format.json { render json: @user, status: :created, location: @user }
else
format.html { render action: "new" }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def post_users(users)\n self.class.post('https://api.yesgraph.com/v0/users', {\n :body => users.to_json,\n :headers => @options,\n })\n end",
"def CreateUser params = {}\n \n APICall(path: 'users.json',method: 'POST',payload: params.to_json)\n \n end",
"def post body=nil, headers={}\n @connection.post \"users.json\", body, headers\n end",
"def create\n # render json: params\n render json: Users.create(params[\"user\"])\n end",
"def create_user(params:)\n parse(JSON.parse(connection.post(\"users\", params.to_json).body))\n end",
"def create\n user = User.create(user_params) \n render json: user, status: :created\n end",
"def create\n user = User.new(user_params)\n if user.save\n render json: user\n else\n render json: {errors: \"Cannot create user\"}, :status => 420\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(form_params)\n\n respond_to do |format|\n if @user.save\n format.json { render json: { users: @user }, status: :created }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n user = User.new(\n username: user_params[:username],\n password: user_params[:password])\n if user.save\n create_example_collection(user)\n render json: user, except: [:password_digest, :created_at, :updated_at]\n else\n render json: {errors: user.errors.full_messages}\n end\n end",
"def create\n user= User.create(user_params)\n render json: user\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n\t\t@user = User.new(users_params)\n\t\tif @user.save\n\t\t\tjson_response(@user, \"User is created Successfully.\")\n\t\telse\n\t\t\trender json: {message: @user.errors.full_messages.join(\" \")}, status: 400\n\t\tend\t\t\n\tend",
"def create\n user = User.new(@user_info)\n if user.save && user.errors.empty?\n render json: { status: 200, data: UserSerializer.new(user).as_json }\n else\n render json: { status: 400, error: user.errors.full_messages }\n end\n end",
"def create\n user = User.create(user_params)\n if user.valid?\n render json: user\n else\n render json: user.errors, status: :unprocessable_entity\n end\n end",
"def create(options = {})\n request(:post, '/users.json', default_params(options))\n end",
"def create\n @user = User.new user_params(params[:user])\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new user_params(params[:user])\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.create user_params\n \n if @user.save\n respond_with(@user) do |format|\n format.json {render}\n end\n end\n end",
"def create\n @user = User.new(user_params(params))\n \n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n\n respond_to do |format|\n if @user.save\n format.json { render json: @user }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(user_params(params))\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params(params))\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create_user\n @user = User.new(user_params)\n if @user.save\n render json: UserSerializer.new(@user).serialized_json\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = @application.users.create(user_params)\n\n if @user.valid?\n render json: @user, status: :created, location: api_application_user_path(@application,@user)\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n user = User.create(user_params)\n if user.save\n render json: user\n else\n render json: user.errors, status: :bad\n end\n end",
"def create\n r = @api.create_user(user_params)\n respond_to do |format|\n if r.code == 201\n format.html { redirect_to users_url, notice: 'User was successfully created.' }\n else\n response = JSON.parse(r.body)\n format.html { redirect_to users_url, alert: response['message']}\n end\n end\n end",
"def create\n\n puts '-----------------------create in user controller'\n\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: UserSerializer.new(@user).serialized_json\n else\n render json: { error: I18n.t('user_create_error') }, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n if @user.save\n render json: { user: @user, success: 'User registration successful' }\n else\n render json: { error: 'User registration unsuccessful' }\n end\n end",
"def create\n @user = User.new(user_params)\n \n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n\t\tputs user_params\n\t\tuser = User.new(user_params)\n\t\tif user.save\n\t\t\trender json: { user: user, status: :success }\n\t\telse\n\t\t\trender json: { status: :failure, errors: user.errors.full_messages.join('') }\n\t\tend\n\tend",
"def create\n\t\t@user = User.new(user_params)\n\t\tif @user.save\n\t\t\trender json: @user, status: :created, location: @user\n\t\telse\n\t\t\trender json: @user.errors, status: :unprocessable_entity\n\t\tend\n\tend",
"def add_user(name, value)\n self.class.post(\"/users/#{name}\",\n body: value,\n headers: {\n 'Content-Type' => 'application/json; charset=UTF-8',\n Connection: 'keep-alive',\n Accept: 'application/json, text/plain, */*'\n })\n end",
"def create\n user = User.new(user_params)\n\n respond_to do |format|\n if user.save\n render json: user, status: :ok\n else\n format.json { render json: user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = current_user.users.build(user_params)\n\n if @user.save\n render json: @user\n else\n @user_items = []\n end\n end",
"def create\n user = User.new(user_params)\n render json: { status: 200, msg: 'User was created.', data: \"User Id #{user.id}\" } if user.save\n end",
"def create\n @users = User.new(params[:user])\n\n respond_to do |format|\n if @users.save\n format.html { redirect_to @users, notice: 'Regist was successfully created.' }\n format.json { render json: @users, status: :created, location: @users }\n else\n format.html { render action: \"new\" }\n format.json { render json: @users.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n user = User.new(user_params)\n if user.save\n render :json => user, :status => :created\n else\n render :json => {:ok => false, :message => user.errors}, :status => :unprocessable_entity\n end\n end",
"def create\n logger.debug user_params\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :ok\n else\n render json: @user.errors, status: :not_acceptable\n end\n end",
"def create\n user = User.create(user_params)\n render json: user, message: 'user succefully create', status: 200\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n\n up = user_params\n\n if up[:name].present?\n up[:first_name] = up[:name].split(' ')[0]\n up[:last_name] = up[:name].split(' ')[1]\n up.delete :name\n end\n @user = User.new(up)\n\n respond_to do |format|\n if @user.save\n # render json: {user: user, token: token}\n\n format.html { redirect_to @user, notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: api_user_url(@user)}\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n user = User.new(user_params)\n if user.save\n render json: {status: \"Se creo el usuario\"}, status: :ok\n else\n render json: {status: \"Error al crear el usuario\", errors: user.errors }, status: :unprocessable_entity\n end\n end",
"def create\n user = User.new(params[:user].permit(:username))\n if user.save\n render json: user\n else\n render json: user.errors.full_messages, status: :unprocessable_entity\n end\n end",
"def create\n puts '>>> params:'\n puts params.inspect\n @user = User.new(params[:user])\n puts '>>> User:'\n puts @user.inspect\n\n if @user.save\n render json: @user, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n \tdata = { data: @user, status: :created, message: \"User was successfully created.\" }\n render :json => data\n else\n \tdata = { data: @user.errors, status: :unprocessable_entity }\n render :json => data\n end\n end",
"def create\n user_details = params.permit(:first_name, :last_name, :email)\n success = User.create(user_details)\n\n render json: { success: success }\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: @user.as_json(only: [:email, :authentication_token]), status: :created\n else\n head(:unprocessable_entity)\n end\n end",
"def create_user\n params = {\n :client_id => Swiftype.platform_client_id,\n :client_secret => Swiftype.platform_client_secret\n }\n post(\"users.json\", params)\n end",
"def create\n @user = User.new(params[:user])\n\n if @user.save\n respond_to do |format|\n format.json { render :json => @user.to_json, :status => 200 }\n format.xml { head :ok }\n format.html { redirect_to :action => :index }\n end\n else\n respond_to do |format|\n format.json { render :text => \"Could not create user\", :status => :unprocessable_entity } # placeholder\n format.xml { head :ok }\n format.html { render :action => :new, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(user_params)\n if @user.save\n render :ok, json: @user.to_json\n else\n @errors = @user.errors.full_messages\n render json: { message: @errors }, status: :unauthorized\n end\n end",
"def create\n puts params\n @user = User.new(params[:user])\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to @user, notice: 'User was successfully created.' }\n format.json { render json: @user.as_json(user: current_user), status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n user = User.new(user_params)\n if user.save\n render json: {status: 200, msg: 'User was created.'}\n else\n render json: {errors: user.errors.messages}\n end\n end",
"def create\n @user = User.new(params[:user])\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to users_url, :notice => 'User was successfully created.' }\n format.json { render :json => @user, :status => :created, :location => @user }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new({name: params[:name], email: params[:email], password: params[:password], photo: params[:photo]})\n @user.save\n render json:@user\n end",
"def create\n user = User.create(user_params)\n\n if user.valid?\n render json: {user: UserSerializer.new(user), token: encode_token(user.id)}\n else\n render json: user.errors.full_messages\n end\n end",
"def create\n\t\tnew_user = User.new(user_params)\n\t\tif new_user.save\n\t\t render status: 200, json: {\n\t\t \tstatus: 200,\n\t\t message:\"New User Created\",\n\t\t response: {\n\t\t name: new_user.name,\n\t\t email: new_user.email,\n\t\t id: new_user.id,\n\t\t facebook_id: new_user.facebook_id,\n\t\t device_id: new_user.device_id,\n\t\t authentication_token: new_user.authentication_token\n\t\t }\n\t\t \n\t\t }.to_json\n\t\telse\n\t\t render status: 404, json: {\n\t\t \tstatus: 404,\n\t\t errors: new_user.errors\n\t\t }.to_json\n\t\tend\n\tend",
"def create\n\t\tresp = {} \n user = User.create(user_params)\n \tif user.valid?\n if user.save\n return render :json => user.as_json\n end\n end\n render json: user.errors.full_messages \n\tend",
"def post_users_with_http_info(users, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: UsersApi.post_users ...'\n end\n # verify the required parameter 'users' is set\n if @api_client.config.client_side_validation && users.nil?\n fail ArgumentError, \"Missing the required parameter 'users' when calling UsersApi.post_users\"\n end\n # resource path\n local_var_path = '/users'\n\n # query parameters\n query_params = opts[:query_params] || {}\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:body] || @api_client.object_to_http_body(users) \n\n # return_type\n return_type = opts[:return_type] || 'User' \n\n # auth_names\n auth_names = opts[:auth_names] || ['Bearer']\n\n new_options = opts.merge(\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: UsersApi#post_users\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_user(options = {})\n post \"/users\", options\n end",
"def create\n @user = User.new(params[:user])\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to users_url, notice: 'User was successfully created.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(params[:user])\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to users_url, notice: 'User was successfully created.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \n user = User.new(user_params)\n\n if user.save\n\n render json: {status: 200, msg: 'User was created.'}\n\n else \n render json: {\n errors: user.errors.full_messages\n }, status: :unprocessable_entity\n\n end\n\n end",
"def create\n @user = User.new(params[:user])\n\n respond_to do |format|\n if @user.save \n format.html { redirect_to users_url, notice: \"User #{@user.name} was successfully created.\" }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_user(body)\n post 'create_user', body\n end",
"def create\n @user = User.new(user_params)\n @user.email = params[:email].downcase\n if @user.save\n render json: @user, status: 200\n else\n render json: { errors: @user.errors.full_messages }, status: 400\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json:@user\n elsif @user.errors\n render json: {error: {code: 400, server_message: @user.errors}}, status: :bad_request\n else\n render json: {error: {code: 500, message: \"Could not save user\", server_message: @user.errors}}, status: :internal_server_error\n end\n\n end",
"def create\n user = User.new(user_params)\n\n if user.valid?\n user.save\n render json: {user: user, token: encode_token({user_id: user.id})}\n else\n render json: {error: \"Failed to create the user\"}\n end\n end",
"def create\n @user = User.new(user_params)\n @user.save\n respond_with @user\n end",
"def create\n @user = User.new(user_params)\n render json: @user && return if @user.save\n\n render json: { error: \"Unable to save user: #{@user.errors.messages}\" }, status: 400\n end",
"def create\n params[:user][\"_id\"] = params[:user][:name]\n @user = User.new(params[:user])\n\n respond_to do |format|\n if @user.save()\n format.html { redirect_to @user, notice: 'User was successfully created.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_user(attributes)\n post(\"/v1/users\", attributes)\n end",
"def create\n user = User.new(user_params)\n\n # if user is saved sucessfully it will return user and ith status 201 for created\n if user.save\n render json:user,status: :created\n #if request is properly served but data is wrong it ll give ubprocessable_entity with code 422\n else\n render json: user.errors, status: :unprocessable_entity\n end \n end",
"def create\r\n @user = User.new(params[:user])\r\n\r\n respond_to do |format|\r\n if @user.save\r\n format.html { redirect_to users_path, notice: 'Os dados do usuário foram salvos com sucesso!' }\r\n format.json { render json: @user, status: :created, location: @user }\r\n else\r\n format.html { render action: \"new\" }\r\n format.json { render json: @user.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n @user = User.new(\n first_name: params[:first_name],\n last_name: params[:last_name],\n birth_date: params[:birth_date],\n height: params[:height],\n weight: params[:weight],\n user_name: params[:user_name],\n password: params[:password],\n password_confirmation: params[:password_confirmation],\n facebook_url: params[:facebook_url],\n twitter_url: params[:twitter_url],\n instagram_url: params[:instagram_url],\n address: params[:address],\n email: params[:email]\n ) \n if @user.save!\n render 'successful.json.jb', status: :created\n else\n render 'unsuccessful.json.jb', status: :bad_request\n end\n end",
"def post(hash)\n HttpClient::Preconditions.assert_class('hash', hash, Hash)\n @client.request(\"/users\").with_json(hash.to_json).post { |hash| Apidoc::Models::User.new(hash) }\n end",
"def create\n user = User.create!(user_params)\n session[:user_id] = user.id\n render json: user, status: :created\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render json: {message: \"user create successfuly\"}\n else\n render json: {message: \"Error\"}\n end \n end",
"def create\n # Insert new user in database\n user = User.new(user_params)\n\n if user.save\n # On success, send token information to authenticate user\n token = create_token(user.id, user.username)\n render json: {status: 200, token: token, user: user}\n # render json: @user, status: :created, location: @user\n else\n render json: user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(params[:user])\n @user.status = 'active'\n\n respond_to do |format|\n if @user.save\n format.json { render :json => @user, :status => :created }\n format.html { redirect_to(users_path) }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n respond_with(@user, location: users_url, notice: 'User was successfully created.')\n else\n respond_with(@user)\n end\n end",
"def create\n user = User.new(user_params)\n \n if user.save\n token = JsonWebToken.encode(user_id: user.id)\n render json: { auth_token: token, user: AuthUserSerializer.new(user).serializable_hash }, status: 201\n else \n render json: { errors: user.errors.full_messages }, status: 400\n end\n end",
"def create\n @user = User.new(params[:user])\n puts params[:user]\n respond_to do |format|\n if @user.save\n format.html { redirect_to :users, notice: 'Registration successful.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render :show, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render :show, status: :created, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n user = User.create(user_params)\n if user.valid?\n user.username.downcase\n @token = issue_token(user)\n list = List.create(name: user.username)\n list.user_id = user.id\n user.save\n list.save\n render json: { user: UserSerializer.new(user), jwt: @token }, status: :created \n else \n render json: { error: user.errors.full_messages }, status: :not_acceptable\n end \n end",
"def create\n @user = User.new(user_params)\n respond_to do |format|\n if @user.save\n format.html { redirect_to users_path, notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(user_params)\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to users_path, notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n user_response = API::V1::Users.authenticate params.as_json\n if user_response.success?\n json = HashWithIndifferentAccess.new(user_response.parsed_response)\n auth_response = API::V1::Auth.issue json[:data]\n respond_with auth_response.body, auth_response.code\n else\n respond_with nil, :unauthorized\n end\n end",
"def create\n @user = User.new(user_params)\n\n if @user.save\n render :json => { :status => 0 }\n else\n render :json => { :status => 1, :msg => @user.errors}\n end\n end",
"def create\n @user = User.new(user_params)\n if @user.save\n auth_token = Knock::AuthToken.new payload: { sub: @user.id }\n render json: { username: @user.username, jwt: auth_token.token }, status: :created\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def create\n authorize :user, :create?\n @user = User.new(user_params)\n @user.save\n\n respond_to do |format|\n format.html\n format.json { render :json => @user, status: 200 }\n end\n end",
"def post_accounts(json_hash)\n @options = {:path => '/users.json',\n :body => json_hash[:json]}.merge(@options)\n\n request(\n :expects => 201,\n :method => :post,\n :body => @options[:body]\n )\n end",
"def create\n user = User.new(username: params[:username])\n if user.save\n payload = {'user_id': user.id}\n token = JWT.encode(payload, 'chatapp')\n render json: {\n user: user,\n token: token\n }\n else \n render json: { message: 'There was an error creating your account' }\n end\n end",
"def create\n user = User.create!(user_params)\n if user\n session[:user_id] = user.id\n render json: user, status: :created\n else\n render json: { errors: user.errors.full_messages }, status: :unprocessable_entity\n end\n end",
"def create\r\n @user = User.new user_params\r\n\r\n if @user.save\r\n render json: @user, serializer: SessionSerializer, root: nil\r\n else\r\n render json: { errors: @user.errors }, status: :unprocessable_entity\r\n end\r\n end",
"def create\n user = User.new(user_params)\n if user.save\n render json: { status: 'OK', msg: 'User was created.', error: 'nil' },\n status: :created\n else\n not_good(422)\n end\n end"
] | [
"0.77179813",
"0.75206673",
"0.73831296",
"0.72405374",
"0.719841",
"0.7140812",
"0.71038526",
"0.7058827",
"0.7041636",
"0.70236504",
"0.7003128",
"0.70021695",
"0.70021695",
"0.70021695",
"0.69936967",
"0.6990463",
"0.6980393",
"0.6979075",
"0.69788617",
"0.69788617",
"0.69762856",
"0.6962628",
"0.6952247",
"0.69454783",
"0.69454783",
"0.6920555",
"0.69181055",
"0.691467",
"0.6901315",
"0.6898759",
"0.689459",
"0.6889815",
"0.6880676",
"0.6880467",
"0.6880196",
"0.68797004",
"0.6877297",
"0.686924",
"0.6855058",
"0.6851115",
"0.6844058",
"0.6814104",
"0.6803589",
"0.6777842",
"0.6776859",
"0.67678535",
"0.6757897",
"0.67471397",
"0.6738628",
"0.6734963",
"0.6733872",
"0.6720612",
"0.6711659",
"0.6670256",
"0.66581875",
"0.66573423",
"0.6654514",
"0.6638977",
"0.66325235",
"0.66199607",
"0.6615226",
"0.66148156",
"0.65989614",
"0.65910506",
"0.65792614",
"0.6578957",
"0.6573529",
"0.6573351",
"0.6557221",
"0.6553408",
"0.6551572",
"0.65466446",
"0.6540912",
"0.65399504",
"0.6538697",
"0.6535891",
"0.6533581",
"0.6526114",
"0.65116656",
"0.65072525",
"0.6507116",
"0.6503024",
"0.6490388",
"0.6488653",
"0.64881754",
"0.6473845",
"0.64722794",
"0.64702916",
"0.64702916",
"0.6469406",
"0.64682525",
"0.6462379",
"0.64619774",
"0.646129",
"0.6455196",
"0.645272",
"0.6448271",
"0.6447503",
"0.64468706",
"0.64460355",
"0.6441883"
] | 0.0 | -1 |
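The create record above accepts form and JSON submissions, mails a welcome message on success, and answers the JSON path with 201 on save or 422 with validation errors. A standard-library client sketch of that JSON path follows; the host, port, and attribute names are assumptions rather than values taken from the record.

    # Illustrative client call for POST /users.json; attribute names are assumed.
    require "net/http"
    require "json"

    uri  = URI("http://localhost:3000/users.json")
    body = { user: { name: "Ada", email: "ada@example.com", password: "secret" } }.to_json
    res  = Net::HTTP.post(uri, body, "Content-Type" => "application/json")

    puts res.code             # "201" when the user saves, "422" when validation fails
    puts JSON.parse(res.body) # the created user, or the validation errors
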
PUT /users/1 PUT /users/1.json | def update
if @oeuser.id.to_i == params[:id].to_i
@user = User.find(params[:id])
if params[:oldpassword] and params[:newpassword] and params[:confirmpassword]
@user.errors.add(:password, "Current password doesn't match") unless @user.checkpass(params[:oldpassword])
@user.errors.add(:password, "New and confirmation passwords don't match") unless params[:newpassword] == params[:confirmpassword]
if @user.errors.empty?
@user.password = params[:newpassword]
end
end
respond_to do |format|
if @user.errors.empty? and @user.update_attributes(params[:user])
format.html { redirect_to @user, notice: 'User was successfully updated.' }
format.json { head :ok }
else
format.html { render action: "edit" }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
else
flash[:error] = "Restricted access: you need to be an admin"
redirect_to root_url
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n render json: Users.update(params[\"id\"], params[\"user\"])\n end",
"def update\n render json: User.update(params[\"id\"], params[\"user\"])\n end",
"def UpdateUser params = {}\n \n APICall(path: 'users.json',method: 'PUT',payload: params.to_json)\n \n end",
"def put user_id, options={}, headers={}\n @connection.put \"users/#{user_id}.json\", options, headers\n end",
"def updateUser\n options = {\n :body => params.to_json,\n :headers => {\n 'Content-Type' => 'application/json',\n 'Authorization' => request.headers['Authorization']\n }\n }\n results = HTTParty.put(\"http://192.168.99.101:4051/users/\"+@current_user[\"id\"].to_s, options)\n render json: results.parsed_response, status: results.code\n end",
"def update\n user = User.find_by(id: params[:id])\n user.update(user_params)\n render json: user\n end",
"def update\n @user = User.find(params[:id])\n @user.name = params[:name]\n @user.email = params[:email]\n @user.password = params[:password]\n @user.photo = params[:photo]\n @user.role = params[:type]\n @user.save\n render json:@user\n end",
"def update\n if user.update(user_params)\n render json: user\n else\n render json: {errors: \"Cannot create user\"}, :status => 420\n end\n end",
"def update\n user = @user_service.update_user(params[:id])\n render json: user, status: :ok\n end",
"def update_current_logged_in_user(args = {}) \n put(\"/users.json/current\", args)\nend",
"def modify_user(user)\n query_api_object Model::User, '/rest/user', user.to_hash, 'PUT'\n end",
"def update \n user = User.find(params[:id])\n # byebug\n user.update(user_params)\n\n render json: user\n end",
"def update\n @user = User.find(params[:id])\n @user.name = params[:name]\n @user.email = params[:email]\n @user.password = params[:password]\n @user.photo = params[:photo]\n @user.save\n render json:@user\n end",
"def update\n user = find_user\n user.update!(user_params)\n render json: user\n end",
"def update\n user = User.find(params[:id])\n\n # Use update with user_params to do a mass-assignment update and save. \n if user.update_attributes(user_params)\n render json: user\n else \n render json: user.errors.full_messages, status: :unprocessable_entity\n end\n end",
"def update_user(user, options = {})\n put \"/users/#{user}\", options\n end",
"def update_user(options)\n patch(\"/user\", options, 3)\n end",
"def modify_user(user)\n query_api_object User, \"/rest/user\", user.dump(), \"PUT\"\n end",
"def update\n begin\n user = User.find(params[:user_id])\n if user.update(user_params)\n render json: { users: user }, status: :ok\n else\n render json: { errors: user.errors.messages }, status: 422\n end\n rescue => e\n render json: { errors: e.message }, status: 404\n end\n end",
"def update\n if @api_v1_user.update(api_v1_user_params)\n head :no_content\n else\n render json: @api_v1_user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params)\n render json:@user\n else\n render json: { error: {code: 404, message: 'Invalid user' }}, status: :not_found\n end\n end",
"def update\n user = User.find(params[:id])\n user.update(user_params)\n if user.valid?\n render json: user\n else\n render json: user.errors\n end\n end",
"def update\n if @user.update(user_params)\n render json: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @user.update(user_params)\n render json: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update(id, params = {})\n request(:put, \"/users/#{id}\", body: params)\n end",
"def update\n if @user.update(user_params)\n render json: @user\n else\n render json: {error: \"Could not update user\"}\n end\n end",
"def update\n \trespond_to do |format|\n if @user.update(user_params)\n format.json { render json: @user }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n\t \t\n end",
"def update\n user = User.find(params[:id])\n if user.update(user_params)\n render json: user\n else\n render json: user.errors.full_messages\n end\n end",
"def update\n if @user.update(user_params)\n render json: @user, status: 200\n else\n render json: @user.errors, status: 422\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update_attributes(params[:user])\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update_attributes(params[:user])\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update user_params(params[:user])\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(params[:user])\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id]) \n \n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to users_url, notice: 'User #{@user.name} was successfully created.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @user.update(user_params)\n render json: @user, status: :ok\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n @user.update(user_params)\n render json: @current_user\n end",
"def update\n user = User.find(params[:id])\n if user.update(params_user)\n render json: user, status: 200\n else\n render json: user.errors, status: 422\n end\n\n end",
"def update\n\t\tif @user.update(user_params)\n\t\t\trender json: @user\n\t\telse\n\t\t\trender json: @user.errors, status: :unprocessable_entity\n\t\tend\n\tend",
"def update\n @user.update(user_params_update)\n json_response(@user)\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params(params[:user]))\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @user.update(user_params)\n render json: @user, status: :ok, location: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if user.update(user_params)\n render json: user, status: :ok\n else\n format.json { render json: user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_users_password(args = {}) \n put(\"/users.json/backoffice/#{args[:userId]}/password/#{args[:password]}\", args)\nend",
"def update_users_password(args = {}) \n put(\"/users.json/backoffice/#{args[:userId]}/password/#{args[:password]}\", args)\nend",
"def update_user\n @user = User.find(params[:id])\n if @user.update(user_params)\n head :no_content\n else\n render json: @user.errors, status :unprocessable_entity\n end\n end",
"def update\n user = User.find(params[:id])\n render json: { status: 200, msg: 'User details have been updated.' } if user.update(user_params)\n end",
"def update\n @user = User.find(params[:id])\n @user.update_attributes(params[:user])\n respond_with @user\n end",
"def update\n @user = User.find(params[:id])\n @user.update_attributes(params[:user])\n respond_with @user\n end",
"def update\n @user = V1::User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n flash[:notice] = 'V1::User was successfully updated.'\n format.html { redirect_to(@user) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params)\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params)\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @user.id == current_api_user.id\n if @user.update(user_params)\n render json: @user.as_json(except: [:updated_at]), status: :ok\n else\n render json: @user.errors, status: :bad_request\n end\n else\n render json: '', status: :forbidden\n end\n end",
"def update\n @user = User.find(params[:id])\n if @user.update(user_params)\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params(params))\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params(params))\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update \n @current_user.update(user_params)\n render json: @current_user\n end",
"def update\n @user = User.find(params[:id])\n if @user.update(user_params(params))\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = selected_user\n if @user.update(users_params)\n render 'api/users/show'\n else\n render json: @user.errors.full_messages, status: 422\n end\n end",
"def update\n @user = User.find(params[:id])\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.json { head :ok }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user = current_api_user\n unless @user.update(user_params)\n render json: { error: @user.errors.full_messages.to_sentence }, status: :not_found\n end\n end",
"def update\n respond_to do |format|\n if @user.update(form_params)\n format.json { render json: { users: @user }, status: :ok, location: @user }\n else\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def edit(id, options={})\n request(:put, \"/users/#{id}.json\", default_params(options))\n end",
"def update_user(id, accountId, model) path = \"/api/v2/accounts/#{accountId}/users/#{id}\"\n put(path, model, {}, AvaTax::VERSION) end",
"def update\n user = User.find(params[:id])\n\n user.attributes = {\n name: params[:name]\n }\n\n user_save user\n end",
"def update\n @user = current_org.users.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user, notice: 'user was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user.update(user_params)\n respond_with @user\n end",
"def update\n user = User.find(params[:id])\n if user.update(user_params)\n render json: {\n status: 'OK',\n msg: 'User details have been updated.',\n error: 'nil'\n }, status: :accepted\n else\n not_good(406)\n end\n end",
"def update\n respond_to do |format|\n if @v1_user.update(v1_user_params)\n format.html { redirect_to @v1_user, notice: 'User was successfully updated.' }\n format.json { render :show, status: :ok, location: @v1_user }\n else\n format.html { render :edit }\n format.json { render json: @v1_user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n authorize @user\n\n if @user.update(user_params)\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n\t\tif @user.update(users_params)\n \t\tjson_response(@user, \"User Update Successfully.\")\n \telse\n \t\trender json: {message: @user.errors.full_messages.join(\" \")}, status: 400\n \tend\n\tend",
"def update\n @user = current_user\n if @user.update(update_user_params)\n render 'api/v1/users/show'\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { render action: \"edit\"}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n \n end",
"def update(context, name, should)\n res = context.transport.put_request(context, \"security/users/#{name}\", keys_to_camelcase(should))\n\n context.err(name, res.body) unless res.success?\n end",
"def update!(options: {})\n\t\t\tuser = User.perform_request User.api_url(\"users/#{id}\"), :put, options, true\n\n\t\t\tif user\n\t\t\t\toptions.each do |key, value|\n\t\t\t\t\tself.send(\"#{key}=\", user['data'][\"#{key}\"])\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tnil\n\t\t\tend\n\t\tend",
"def update\n @user = user.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user, notice: 'user was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @todo = Todo.find(params[:todo][:id])\n if @todo.update_attributes(user_params)\n render json: @todo\n else\n render nothing: true, status: :bad_request\n end\n end",
"def update\n respond_to do |format|\n if @user.update(user_params)\n format.html { redirect_to users_path }\n format.json { render :json => @user }\n else\n format.html { render :edit }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_current_logged_in_users_password(args = {}) \n put(\"/users.json/current/password\", args)\nend",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to users_url, notice: 'User was successfully updated.' }\n\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_name(user_id:, name:)\n path = '/users/{userId}/name'\n .gsub('{userId}', user_id)\n\n if user_id.nil?\n raise Appwrite::Exception.new('Missing required parameter: \"userId\"')\n end\n\n if name.nil?\n raise Appwrite::Exception.new('Missing required parameter: \"name\"')\n end\n\n params = {\n name: name,\n }\n \n headers = {\n \"content-type\": 'application/json',\n }\n\n @client.call(\n method: 'PATCH',\n path: path,\n headers: headers,\n params: params,\n response_type: Models::User\n )\n end",
"def update_current_logged_in_user(args = {}) \n id = args['id']\n temp_path = \"/users.json/current\"\n path = temp_path\nargs.keys.each do |key|\n if (key == \"userId\")\n args.delete(key)\n path = temp_path.gsub(\"{#{key}}\", id)\n end\nend\n puts \" PATH : #{path}\"\n put(path, args)\nend",
"def update_user\n @user = User.find(params[:id])\n @user.update(params[:user])\n redirect \"/users/#{@user.id}\"\nend",
"def update\n @api_user = ApiUser.find(params[:id])\n\n if @api_user.update(api_user_params)\n head :no_content\n else\n render json: @api_user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to users_url, notice: 'User was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_user\n end",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user.as_json(user: current_user), notice: 'User was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.update(params[:user])\n end",
"def update\n @user = User.find(params[:id])\n\n if @user.update(user_params)\n head :no_content\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update \n user = User.where(:id => current_user.user)\n if user.update(user_params)\n render :json => {:user => user }\n else\n render :json => {:error => user.errors.full_messages.first}\n end\nend",
"def update\n @user = User.find(params[:id])\n \n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user, :notice => 'User was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n if @user.update(user_params)\n render json: @user, status: :ok, location: api_application_user_path(@application,@user)\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to users_path, :notice => 'User was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes_from_api(params[:user])\n format.html { redirect_to @user, :notice => 'User was successfully updated.' }\n format.json { render_for_api :user, :json => @user }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user, :notice => t('user.update_success') }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @user.errors, :status=> :unprocessable_entity }\n end\n end\n end",
"def api_v11_users_user_name_put_with_http_info(user_name, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: DefaultApi#api_v11_users_user_name_put ...\"\n end\n \n # verify the required parameter 'user_name' is set\n fail \"Missing the required parameter 'user_name' when calling api_v11_users_user_name_put\" if user_name.nil?\n \n # resource path\n path = \"/api/v11/users/{userName}\".sub('{format}','json').sub('{' + 'userName' + '}', user_name.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = []\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = []\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n\n auth_names = []\n data, status_code, headers = @api_client.call_api(:PUT, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DefaultApi#api_v11_users_user_name_put\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_user\n user = current_user\n if user.update(update_params)\n render json: { status: { updated: \"Ok\" } }\n else\n render json: user.errors.full_messages\n end\n end",
"def update\n @user = User.find(params[:id])\n if @user.update_attributes(user_params)\n redirect_to @user\n else\n format.html { render :edit }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\nend",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user, notice: 'User was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user = ::User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n format.html { redirect_to @user, notice: 'User was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n\n respond_to do |format|\n if @user.update_attributes(params[:user])\n flash[:notice] = \"User #{@user.username} successfully updated!\"\n format.html { redirect_to @user }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n user = User.find(params[:id])\n authorize! :update, user\n if user.update_attributes(user_params)\n render :json => {:ok => true, :message => 'successful updated'}, :head => :no_content\n else\n render :json => {:ok => false, :message => user.errors}, :status => :unprocessable_entity\n end\n end"
] | [
"0.74114245",
"0.73920554",
"0.73041475",
"0.7254177",
"0.7202618",
"0.70756376",
"0.70535713",
"0.7029043",
"0.70075685",
"0.69883573",
"0.6983195",
"0.694263",
"0.69409895",
"0.692315",
"0.6909438",
"0.687742",
"0.68486536",
"0.6834162",
"0.6821841",
"0.6801179",
"0.67703044",
"0.6763487",
"0.6761313",
"0.6761313",
"0.67482305",
"0.67473894",
"0.6713073",
"0.6703807",
"0.6693307",
"0.66886777",
"0.66886777",
"0.66646844",
"0.66617274",
"0.66572624",
"0.6653578",
"0.66406506",
"0.6625279",
"0.66213304",
"0.66192704",
"0.6614916",
"0.6612626",
"0.6604333",
"0.65851104",
"0.65851104",
"0.65785134",
"0.65615654",
"0.65518224",
"0.65518224",
"0.6549094",
"0.6530534",
"0.6530534",
"0.65275174",
"0.6523527",
"0.6520384",
"0.6520384",
"0.6516204",
"0.65145653",
"0.65104014",
"0.6504922",
"0.6499594",
"0.64987266",
"0.64906204",
"0.64810187",
"0.64798295",
"0.64702576",
"0.64496434",
"0.6436427",
"0.6433962",
"0.64330167",
"0.6428237",
"0.6406415",
"0.6402615",
"0.6399288",
"0.63881207",
"0.63877773",
"0.6353986",
"0.63537806",
"0.633806",
"0.63360107",
"0.6334851",
"0.632672",
"0.63260114",
"0.63179153",
"0.63173646",
"0.6317282",
"0.6316377",
"0.6316055",
"0.63120025",
"0.6293317",
"0.62857985",
"0.6282219",
"0.6280316",
"0.6264061",
"0.62624925",
"0.625522",
"0.62549126",
"0.62547195",
"0.625327",
"0.625269",
"0.6252329",
"0.6245382"
] | 0.0 | -1 |
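The update record above folds an owner check and an optional password change into a single action. As a plain-Ruby sketch, the password guard reads as below; the helper name is illustrative, and `checkpass` is assumed to verify the current password, as in the record.

    # Illustrative extraction of the password-change guard used in the update record.
    def password_change_errors(user, old_pw, new_pw, confirm_pw)
      errors = []
      errors << "Current password doesn't match" unless user.checkpass(old_pw)
      errors << "New and confirmation passwords don't match" unless new_pw == confirm_pw
      errors
    end
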
DELETE /users/1 DELETE /users/1.json | def destroy
@user = User.find(params[:id])
@user.destroy
respond_to do |format|
format.html { redirect_to users_url }
format.json { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def DeleteUser id\n \n APICall(path: \"users/#{id}.json\",method: 'DELETE')\n \n end",
"def delete\n render json: User.delete(params[\"id\"])\n end",
"def delete(id)\n request(:delete, \"/users/#{id}.json\")\n end",
"def delete\n render json: Users.delete(params[\"id\"])\n end",
"def delete\n @user.destroy\n respond_to do |format|\n format.html { redirect_to v1_resources_users_all_path, notice: 'User was deleted.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n render json:@user\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n render json:@user\n end",
"def destroy\n @user = V1::User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to(v1_users_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n \"\"\"\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n \"\"\"\n end",
"def destroy\n debugger\n @user.destroy\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user.destroy\n format.json { head :no_content }\n end",
"def destroy\n user = User.find(params[:id]) # from url, nothing to do with table\n user.destroy\n render json: user\n end",
"def destroy\n @user.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find_by_id_or_username params[:id]\n @user.destroy\n render api_delete @user\n end",
"def destroy\n @user = user.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end",
"def delete_user\n @user = User.find(params[:id])\n if @user.destroy\n render :json => @user\n else\n render :json => @user.errors.full_messages\n end\n end",
"def destroy\n @v1_user.destroy\n respond_to do |format|\n format.html { redirect_to v1_users_url, notice: 'User was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n \n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = User.find(params[:id])\n @user.destroy\n\n respond_to do |format|\n format.html { redirect_to users_url }\n format.json { head :no_content }\n end\n end"
] | [
"0.78750724",
"0.77518034",
"0.7713981",
"0.7610077",
"0.747295",
"0.74073994",
"0.74073994",
"0.7369968",
"0.7346072",
"0.7340465",
"0.7328618",
"0.7309635",
"0.73095363",
"0.7306841",
"0.7297868",
"0.72917855",
"0.7291585",
"0.7289111",
"0.7284347",
"0.7245172",
"0.7242216",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177",
"0.7232177"
] | 0.7250935 | 33 |
TODO: probably time to move the following logic to service | def cash
order = Order.find(params[:data][:attributes][:order_id])
payment = Payment.new do |p|
p.order = order
p.customer = order.customer
p.vendor = order.vendor
p.order_total = order.total
p.paid_amount = 0 # NOTE: amount passed through payment gateway, thus 0,
p.credits_amount = 0
p.payment_method = ::API::V1::Support::PaymentMethodCaclulator.calculate(p)
end
if payment.save && cash_order_state_changed?(order)
render json: {}, status: 201
else
head 422
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def private; end",
"def strategy; end",
"def service; end",
"def service_request(service); end",
"def implementation; end",
"def implementation; end",
"def bi_service\n end",
"def refutal()\n end",
"def probers; end",
"def operations; end",
"def operations; end",
"def service; raise NotImplementedError; end",
"def processor; end",
"def schubert; end",
"def response_from_service\n\n end",
"def executor; end",
"def executor; end",
"def executor; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def resolver; end",
"def post_process; end",
"def sitemaps; end",
"def apply\n\t\t\n\tend",
"def apply\n\t\t\n\tend",
"def retire\n\n end",
"def get()\n \n end",
"def suivre; end",
"def perform\n \n end",
"def apis; end",
"def weber; end",
"def services\n\n end",
"def prepare_result; end",
"def services\n end",
"def private_method\n end",
"def fetch; end",
"def fetch; end",
"def across_service_state\n super\n end",
"def identify; end",
"def who_we_are\r\n end",
"def provider; end",
"def respond(); end",
"def operation; end",
"def calls; end",
"def calls; end",
"def post_reader; end",
"def handle; end",
"def intensifier; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def request; end",
"def internal; end",
"def get; end",
"def lookup_context; end",
"def lookup_context; end",
"def lookup_context; end",
"def common\n \n end",
"def read(_request)\n raise NotImplementedError, 'Implement this method in your strategy'\n end",
"def process; end",
"def process; end",
"def process; end",
"def process; end",
"def process; end",
"def process; end",
"def process; end",
"def process; end",
"def stored_data; end",
"def request_result\n \n end",
"def fetch\n end",
"def call\n\n\tend",
"def call\n\n\tend",
"def prepareForReuse; end",
"def execute; end",
"def execute; end",
"def fetch\n raise \"not implemented\"\n end",
"def req\n \n end",
"def query; end",
"def fetch\n raise NotImplementedError\n end",
"def format_service\n\n end",
"def call\n # implement in subclasses\n end",
"def run\n super\n\n #get account_name\n account_name = _get_entity_name\n entity_type = _get_entity_type_string\n\n # blob Url\n blob_url = 'blob.core.windows.net'\n\n # Lists\n valid_account_list = []\n\n # get entity details\n if entity_type == \"Domain\"\n account_name = account_name.split('.')[0]\n _log \"Searching results for #{account_name} ...\"\n elsif entity_type == \"UniqueKeyword\"\n _log \"Searching results for #{account_name} ...\"\n else\n _log_error \"Unsupported entity type\"\n end\n\n # Generate blob brute force list\n # Brute force Azure server, get valid accounts for open Blobs and create issues\n valid_account_list = brute_force_blob account_name, blob_url\n\n # Generate container brute force list\n # Brute force Azure cntainers for listing exposed files and create issues\n brute_force_container valid_account_list\n\n end",
"def relatorios\n end"
] | [
"0.6945658",
"0.60371804",
"0.60099244",
"0.5861514",
"0.5819279",
"0.5819279",
"0.5667633",
"0.5659446",
"0.56236804",
"0.56230736",
"0.56230736",
"0.5590478",
"0.5584909",
"0.5564423",
"0.556105",
"0.5556264",
"0.5556264",
"0.5556264",
"0.551728",
"0.551728",
"0.551728",
"0.551728",
"0.550961",
"0.5506859",
"0.54732084",
"0.5464868",
"0.5464868",
"0.54595274",
"0.5434942",
"0.5428376",
"0.5424371",
"0.5404163",
"0.53976095",
"0.53868747",
"0.53704995",
"0.53588325",
"0.53579366",
"0.5344145",
"0.5344145",
"0.5341909",
"0.5331005",
"0.532897",
"0.5288041",
"0.528108",
"0.527869",
"0.5266717",
"0.5266717",
"0.5253295",
"0.5243128",
"0.5241818",
"0.523722",
"0.523722",
"0.523722",
"0.523722",
"0.523722",
"0.523722",
"0.523722",
"0.523722",
"0.523722",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52368796",
"0.52230835",
"0.5216851",
"0.5211991",
"0.5211991",
"0.5211991",
"0.5193155",
"0.5170813",
"0.5161977",
"0.5161977",
"0.5161977",
"0.5161977",
"0.5161977",
"0.5161977",
"0.5161977",
"0.5161977",
"0.51598537",
"0.51505315",
"0.51463777",
"0.51423955",
"0.51423955",
"0.51418203",
"0.51398915",
"0.51398915",
"0.5128747",
"0.51257646",
"0.51200587",
"0.5112616",
"0.5112451",
"0.5109836",
"0.5108379",
"0.51073503"
] | 0.0 | -1 |
todo list DOWN HERE : 1. getting params DONE 2. filter params 3. create roles 4. create roles_menus 5. redirect GLOBAL PARAMS AND VARIABLE : | def paramsr
@hotel_id = params[:hotel_id]
@id = params[:id]
@role_id = params[:role_id]
@package_id = Hotel.where(id: @hotel_id).first.package_id
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n submenu_item 'role-index'\n @roles = Role.paginate_by_sql(\"select t1.role_id id,t3.role_name,t3.description from\n (select a1.role_id,count(*) permission_num from roles_permissions a1\n where a1.permission_id in (select permission_id from roles_permissions where role_id =#{@current_user.role_id})\n group by a1.role_id) t1,\n (select role_id,count(*) permission_num from roles_permissions where role_id > 1 group by role_id) t2,\n roles t3\n where t1.permission_num = t2.permission_num and t1.role_id = t2.role_id\n and t1.role_id = t3.id \",:page => params[:page], :per_page => 30)\n\n end",
"def crear\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_crear = false\n @security_role_type = Security::RoleType.find_by(name: \"Crear\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type| \n if @security_role_type == role_type.security_role_type.name\n @permiso_crear = true\n break\n end\n end\n elsif params[:controller] == \"security/role_type_menus\"\n params[:controller] = \"security/roles\"\n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_crear = true\n break\n end\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_crear = true\n end\n if params[:action] == \"new\" && @permiso_crear == false\n redirect_to root_path\n end\n return @permiso_crear\n end\n end\n end",
"def redirect\n if params[:role] == \"restaurant\"\n redirect_to :action => \"new\", :invitation => { :first_name => params[:first_name], :last_name => params[:last_name], :email => params[:email] }\n elsif params[:role] == \"media\"\n redirect_to :controller => \"mediafeed/media_users\", :action => \"new\", :user => { :first_name => params[:first_name], :last_name => params[:last_name], :email => params[:email] }\n end\n end",
"def authorized_for_roles(*args)\n # From: http://stackoverflow.com/a/6076035/999973\n # args.any? { |role_name| ROLES.include? role_name }\n # ROLES = %w[admin moderator editor author banned] in user model\n # calling it:\n # before_filter(only: [:edit, :update, :destroy]) {|c| c.authorized_for_roles \"admin\", \"editor\"}\n \n # args.any? { |role_name| current_user.role == role_name }\n \n\n unless signed_in?\n self.current_user = User.create( name: \"Guest\" )\n redirect_to(root_path) unless args.any? { |role_name| current_user.role == role_name }\n self.current_user = nil\n return\n end\n\n redirect_to(root_path) unless args.any? { |role_name| current_user.role == role_name }\n end",
"def create\n @user = User.where(:name => params[:user][:name]).first\n if @user.nil?\n @user = User.new\n flash[:notice] = '用户不存在!'\n respond_to do |format|\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n return\n end\n \n if @user.admin\n @user = User.new\n flash[:notice] = '用户已经是管理员!'\n respond_to do |format|\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n return\n end\n \n select_roles = params[:user_roles]\n select_roles.each do |role_id|\n @user.user_roles.create(:role_id => role_id)\n end unless select_roles.nil?\n \n @user.admin = true\n \n respond_to do |format|\n if @user.save\n @user.roles.joins(:permissions).select('permissions.controller_name,permissions.action_name,permissions.rest,roles.app_id').each do |record|\n UserVisit.create :controller_name => record.controller_name, :action_name => record.action_name, :rest => record.rest, :app_id => record.app_id, :user_id => @user.id\n end\n format.html { redirect_to admin_role_path(@user), notice: '权限新建成功.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def initialize_menus\n return if %w[search upload].include?(params[:controller].to_s.downcase)\n ss = session_section\n SEARCH_KEYS.each do |key|\n ss_value = ss[key.to_s]\n if ss_value.present?\n if key == :sort\n set_sort_params(ss_value)\n else\n params[key] = ss_value\n end\n elsif key == :sort\n SEARCH_SORT_KEYS.each do |k|\n v = ss[k.to_s]\n params[k] = v if v.present?\n end\n end\n end\n end",
"def index\n # The hook below can access controller's instance variables.\n if current_user\n if current_user.role?:livia_admin\n redirect_to companies_url\n elsif is_secretary_or_team_manager?\n redirect_to \"/wfm/notes\"\n elsif current_user.end_user\n redirect_to physical_clientservices_home_index_path\n elsif is_client\n redirect_to matter_clients_url\n elsif current_user.role?:lawfirm_admin\n redirect_to lawfirm_admins_url\n return\n end\n else\n flash[:error] = t(:flash_DB_error)\n redirect_to login_url\n end\n end",
"def usuarios\n redirect_to :action => \"roles\"\n end",
"def select_role\n if params[:user] && params[:user][:role_id]\n stage_one\n render :new\n else\n redirect_to \"/welcome/advertiser\"\n end\n end",
"def modificar\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_modificar = false\n @security_role_type = Security::RoleType.find_by(name: \"Modificar\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_modificar = true\n break\n end\n end\n elsif params[:controller] == \"security/role_type_menus\"\n params[:controller] = \"security/roles\"\n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_modificar = true\n break\n end\n end\n end\n end\n if params[:controller] == \"service/services\" && current_user.employee.provider_provider_id.nil?\n @permiso_modificar = true\n end\n end\n if current_user.username == \"aadmin\" \n @permiso_modificar = true\n end\n\n if params[:action] == \"edit\" && @permiso_modificar == false\n redirect_to root_path\n end\n return @permiso_modificar\n end\n end\n end",
"def permiso_anular\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_anular = false\n @security_role_type = Security::RoleType.find_by(name: \"Anular\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_anular = true\n break\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_anular = true\n end\n if params[:action] == \"anular\" && @permiso_anular == false\n redirect_to root_path\n end\n return @permiso_anular\n end\n end\n end",
"def mod_all\n role = Role.find(params[:id])\n user = User.find(params[:role][:user_id])\n user.roles << role\n\n flash[:notice] = \"La modificacion ha sido realizada correctamente.\"\n\n redirect_to :back\n end",
"def create\n @company = Company.find(params[:company_id])\n @role = Role.find(params[:role_id])\n access_right_hash = params[:access_right]\n \n if current_user.super_admin\n is_ok = true\n else\n current_user.roles.each { |r|\n r.access_rights.each { |ar|\n puts access_right_hash['model_name']\n if ar.model_name == access_right_hash['model_name'] && ar.action == access_right_hash['action']\n is_ok = true\n end\n }\n }\n end\n \n respond_to do |format|\n if is_ok\n @access_right = @role.access_rights.create(params[:access_right])\n @access_right.company_id = current_user.company_id\n @access_right.save\n format.html { redirect_to company_role_path(@company, @role) }\n else\n format.html { redirect_to company_role_path(@company, @role), notice: 'Usted no puede conceder este permiso.' }\n end\n end\n end",
"def index\n return_path users_path # !!! same as line 10?\n# @is_first_user = User.first.id == 1\n# session[:go_to_after_edit] = users_path\n @users = User.search(params[:search]).order(sort_column + \" \" + sort_direction).page(params[:page]).per(25)\n @menu='admin'\n end",
"def create\n if params and params[:user]\n @role = params[:user][:role] \n \n user_session[\"role\"] = @role if user_session\n end\n super\n end",
"def index\n if current_user.rol == 1\n @roles = Role.order(:id)\n @role = Role.new\n else\n @mensaje = \"Seccion solo para administrador\"\n end\n end",
"def create\n @user = User.new(user_params)\n\n if roles = params[:user][:roles]\n roles.map { |r| r.downcase }.each do |role|\n unless role.empty?\n @user.roles << Role.new(type: role)\n\n if role == \"admin\"\n respond_to do |format|\n if @user.save\n format.html { redirect_to (flash[:redirect] || :attendees), notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end\n\n if role == \"staff\"\n redirect_to get_staff_list_path\n end\n\n end\n end\n end\n end",
"def check_user_role \t \n redirect_to root_path unless current_user.roles.first.name == \"empleado\" or current_user.roles.first.name == \"supervisor\"or current_user.roles.first.name == \"admin\" \n end",
"def create\n submenu_item 'role_new'\n load_permissions\n ids=params[:permissions].select{|k,v| v=='1'}.map { |k,v| k.to_i } unless params[:permissions].nil?\n if ids.length > 0\n permissions=Permission.find(:all, :conditions => [\"id in (#{ids.join(',')})\"])\n params[:role][:permissions] = permissions\n @role = Role.new(params[:role])\n if @role.save\n flash[:notice] = \"创建角色成功\"\n redirect_to :action => 'index'\n else\n flash[:error] = \"创建角色失败\"\n render :action => 'new'\n end\n else\n flash[:error] = \"角色名或权限不能为空\"\n redirect_to :action => 'new'\n end\n\n end",
"def set_menu\n @food_cart = FoodCart.find(params[:id])\n @menu = @food_cart.menu\n # @menu = Menu.find(params[:id])\n authorize @menu\n end",
"def consultar\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_consultar = false\n @security_role_type = Security::RoleType.find_by(name: \"Consultar\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name || role_type.security_role_type.name == \"Pagar\"\n @permiso_consultar = true\n break\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_consultar = true\n end\n if params[:action] == \"show\" && @permiso_consultar == false\n redirect_to root_path\n end\n return @permiso_consultar\n end\n end\n end",
"def admin_menu\n if session[:user_id] and session[:position]\n @admin_user = AdminUser.find_by_id(session[:user_id])\n\n @admin_expenses = Expense.where(:admin_user_id => @admin_user.id)\n\n # the following conditional sets the permissions\n if @admin_user.position.to_s == \"ManagerSnr\"\n flash[:notice] == \"#{@admin_user.first_name} is logged in \"\n @manager_permission = AdminUser.find_by_id(session[:user_id])\n\n elsif @admin_user.position.to_s == \"Manager17\"\n flash[:notice] == \"#{@admin_user.first_name} is logged in \"\n @manager_permission = AdminUser.find_by_id(session[:user_id])\n\n elsif @admin_user.position.to_s == \"Manager15\"\n flash[:notice] == \"#{@admin_user.first_name} is logged in \"\n @manager_permission = AdminUser.find_by_id(session[:user_id])\n\n elsif @admin_user.position.to_s == \"Manager12\"\n flash[:notice] == \"#{@admin_user.first_name} is logged in \"\n @manager_permission = AdminUser.find_by_id(session[:user_id])\n\n elsif @admin_user.position.to_s == \"Secretary\" or \"Treasurer\" or \"Chairman\"\n flash[:notice] == \"#{@admin_user.first_name} is logged in\"\n @admin_permission = AdminUser.find_by_id(session[:user_id])\n\n # superuser has full access\n elsif @admin_user.position.to_s == \"Administrator\"\n flash[:notice] == \"#{@admin_user.first_name} is logged in\"\n @super_user = AdminUser.find_by_id(session[:user_id])\n end\n end\n end",
"def index\n @roles = Role.all.paginate(:page => params[:page], :per_page => 10).order('id desc')\n \n \n if params[:count]\n params[:count]\n else\n params[:count] = 10\n end\n \n if params[:page]\n page = params[:page].to_i\n else\n page = 1\n end\n \n if params[:per_page].present?\n # perpage = params[:per_page]\n @per_page = params[:per_page] || Role.per_page || 10\n @roles = Role.paginate( :per_page => @per_page, :page => params[:page])\n else\n perpage = 10\n end\n @per_page = params[:per_page] || Role.per_page || 10\n page = if params[:page]\n params[:page].to_i\n else\n 1\n end\n \n\n \n# Filter by human part\nif params[:name]\n @name = params[:name]\n logger.info \"Then part is #{@name.inspect}\"\n @roles = Role.where(name: @name).paginate( :page => params[:page], :per_page => 10).order('id desc') \nend\n\n\n \n# \n # per_page = 5\n# \n # offset = (page - 1) * per_page\n # limit = page * per_page\n # @array = *(offset...limit)\n\n\n # if params[:search_value] && params[:search_value].strip != ''\n# \n # if params[:search_param] == 'firstname'\n # logger.info \"the code comes to if firstname............\"\n # @contacts = Contact.firstname_search(params[:search_value].strip).paginate(page: params[:page], per_page: params[:count]).order('ID asc')\n# \n # elsif params[:search_param] == 'lastname'\n # logger.info \"the code comes to elsif lastname.............\"\n # @contacts = Contact.lastname_search(params[:search_value].strip).paginate(page: params[:page], per_page: params[:count]).order('ID asc')\n# \n # else\n # logger.info \"the code comes to the else....\"\n # @contacts = Contact.paginate(page: params[:page], per_page: params[:count]).order('ID desc')\n # @search_json = []\n # end\n# \n # elsif params[:search_param] == 'date'\n # logger.info \"the code comes to elsif date.............\"\n# \n # start = (params[\"start_date\"] + \" \" + \"0:00:00\")# Time.zone.parse(params[\"start_date\"].to_s + \" \" + \"0:00:00\").utc # params[\"start_date\"].to_s + \"0:00:00\"\n # ended = params[\"end_date\"] + \" \" + (\"23:59:59\") # Time.zone.parse(params[\"end_date\"].to_s + \" \" + \"23:59:59\").utc # params[\"end_date\"].to_s + \"23:59:59\"\n # @contacts = Contact.search_date(start,ended).paginate(page: params[:page], per_page: params[:count]).order('ID asc')\n# \n# \n # end\n # p \"JSON ARRAY: #{@search_json}\"\n# \n \n respond_to do |format|\n logger.info \"what is the url calling this??: ans #{request.referer}\"\n # format.js\n format.html\n format.csv { send_data @roles.to_csv(options = {}, page, perpage)}\n format.xls { send_data @roles.to_csv(options={col_sep: \"\\t\"}, page, perpage)}\n end\n end",
"def level_to_admin\r\n Admin.to_admin params[:id]\r\n redirect_to :action => 'show_admins' \r\n end",
"def update\n authorize(current_user)\n role = params[:user][:role_ids]\n roleModel =Role.find( role)\n if @user.setRole roleModel.name\n @user.save\n redirect_to users_path, :notice => \"Rolle geändert\"\n else\n redirect_to users_path, :notice => \"Rolle nicht geändert\"\n end\n end",
"def index\n @users = User.order(:name)\n if( !current_user.isadmin() )\n @users = @users.select { |u| u.isplayer(u.id) == true }\n end\n # print('heloo')\n case params[:format]\n when 'staff'\n @users = @users.select { |u| u.canrole == 'staff' } + @users.select { |u| u.canrole == 'medical' } + @users.select { |u| u.canrole == 'admin' } + @users.select { |u| u.canrole == 'staffpremiere' }\n when 'premiere'\n @users = @users.select { |u| u.canrole == 'premiere' }\n when 'staffpremiere'\n @users = @users.select { |u| u.canrole == 'satffpremiere' }\n when 'm21'\n @users = @users.select { |u| u.canrole == 'm21' }\n when 'm18'\n @users = @users.select { |u| u.canrole == 'm18' }\n when 'm16'\n @users = @users.select { |u| u.canrole == 'm16' }\n when 'm15'\n @users = @users.select { |u| u.canrole == 'm15' }\n when 'fe14'\n @users = @users.select { |u| u.canrole == 'fe14' }\n when 'fe13'\n @users = @users.select { |u| u.canrole == 'fe13' }\n when 'fe12'\n @users = @users.select { |u| u.canrole == 'fe12' }\n when 'fe11'\n @users = @users.select { |u| u.canrole == 'fe11' }\n when 'fc10'\n @users = @users.select { |u| u.canrole == 'fc10' }\n when 'fc9'\n @users = @users.select { |u| u.canrole == 'fc9' }\n when 'fc8'\n @users = @users.select { |u| u.canrole == 'fc8' }\n when 'fc7'\n @users = @users.select { |u| u.canrole == 'fc7' }\n when 'archived'\n @users = @users.select { |u| u.canrole == 'archived' }\n end\n end",
"def create\n @current_admin_user = current_admin_user\n session[:menu_params].deep_merge!(params[:menu]) if params[:menu]\n @menu = Menu.new(session[:menu_params])\n @categories = @menu.categories\n @menu.current_step = session[:menu_step]\n @selected_type = @menu.menu_type\n puts \"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #{@selected_type}\"\n if params[:back_button]\n @menu.previous_step\n elsif @menu.last_step?\n @menu.save\n else\n @menu.next_step\n end\n session[:menu_step] = @menu.current_step\n if @menu.new_record?\n render \"new\"\n else\n session[:menu_steps]=session[:menu_params] = nil\n redirect_to @menu, notice: \"Menu was successfully created.\"\n end\n end",
"def update\n @user = User.find(params[:id])\n @user.transaction do\n @user.user_roles.destroy_all\n select_roles = params[:user_roles]\n select_roles.each do |role_id|\n @user.user_roles.create(:role_id => role_id)\n end unless select_roles.nil?\n respond_to do |format|\n if @user.save!\n @user.user_visits.destroy_all\n @user.roles.joins(:permissions).select('permissions.controller_name,permissions.action_name,permissions.rest,roles.app_id').each do |record|\n UserVisit.create :controller_name => record.controller_name, :action_name => record.action_name, :rest => record.rest, :app_id => record.app_id, :user_id => @user.id\n end\n format.html { redirect_to admin_role_url(@user), notice: '权限修改成功.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def admin_required\n #just a patch until i can implement users and roles\n \n # \"Speak friend, and enter\" (Moria)\n session[:friend] ||= params[:friend]\n session[:friend] || redirect_to(home_path)\n end",
"def role_menu_params\n params.require(:role_menu).permit(:name, :role_id, :is_used, :menu_id)\n end",
"def reenviar_usuario\n @bandera = false\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_reenviar_usuario = false\n @security_role_type = Security::RoleType.find_by(name: \"Reenviar Usuario\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_reenviar_usuario = true\n break\n elsif role_type.security_role_type.name == \"Consultar\"\n @bandera = true\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_reenviar_usuario = true\n end\n if @bandera == true\n elsif params[:action] == \"forget_username_list\" && @permiso_reenviar_usuario == false\n redirect_to root_path\n end\n return @permiso_reenviar_usuario\n end\n end\n end",
"def create\n @manage_admin = Manage::Admin.new(manage_admin_params)\n @admin_roles= @manage_admin.roles\n respond_to do |format|\n if @manage_admin.save\n\n # 保存角色信息\n roles_id=params[:roles]\n @manage_admin.roles_in_id=roles_id\n\n format.html { redirect_to @manage_admin, notice: \"成功创建管理员#{@manage_admin.nickname}.\" }\n\n format.json { render :show, status: :created, location: @manage_admin }\n else\n format.html { render :new }\n format.json { render json: @manage_admin.errors, status: :unprocessable_entity }\n end\n end\n end",
"def verificar_permiso\n if user_signed_in?\n unless current_user.employee.nil?\n @existe_permiso = false\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu|\n if security_role_menu.security_menu.controller == params[:controller] \n @existe_permiso = true\n break\n elsif params[:controller] == \"service/services\" and session[:atender] == true\n @existe_permiso = true\n end\n if params[:controller] == \"security/role_type_menus\" || params[:controller] == \"security/role_menus\"\n @existe_permiso = true\n end\n end\n if current_user.username == \"aadmin\"\n @existe_permiso = true\n end\n if @existe_permiso == false\n redirect_to root_path\n end\n end\n end\n end",
"def create\n @roles = Role.paginate :page => params[:page],\n :per_page => 15,\n :order => sort_order('name')\n @role= Role.new(params[:role])\n\n respond_to do |format|\n if @role.save\n format.html { redirect_to(roles_url, :notice => 'New User role successfully added.') }\n format.xml { render :xml => @role, :status => :created, :location => @role }\n else\n format.html { render :action => \"index\" }\n format.xml { render :xml => @role.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def set_possible_roles\n\tif User.has_role Role.ADMINISTRATOR,session[:roles]\n\t @roles=Role.all\n\t return\n\tend\n\n\t@logged_in_user_role_id = UserRoleMap.getRoleidByUserid(session[:session_user])\n\t#@roles = Role.where(:id => RoleReportTo.select(\"user_role_id\").where(:manager_role_id => @logged_in_user_role_id))\n\t@roles = Role.getRolesByRoleid(RoleReportTo.getUserRoleidByManagerRoleid(@logged_in_user_role_id))\n\thas_volunteer=false\n\t@roles.each do |role|\n\t if role.role == Role.VOLUNTEER\n\t\thas_volunteer=true\n\t\tbreak\n\t end\n\tend\n\tunless has_volunteer\n\t @volunteer_role=Role.where(role:Role.VOLUNTEER)\n\t @volunteer_role.each do |role|\n\t\t@roles.push role\n\t end\n\tend\n end",
"def menu\n \tredirect_to session_new_path\n end",
"def create\n\t\tauthorize! :create, AsignacionRol\n @asignacion_rol = AsignacionRol.new(asignacion_rol_params)\n @asignacion_rol.esActual = true\n @asignacion_rol.active = true\n @proyecto = @asignacion_rol.proyecto\n @roles = []\n @rols = Rol.where(tipo_rol: TipoRol.where(nombre:'Proyecto'))\n coor_sist = Rol.where(\"nombre = ?\", 'Coordinador Sistema').first\n asig_func = AsignacionFuncion.where(\"usuario_id = ? AND rol_id = ?\", current_usuario, coor_sist).first\n @rols.each do |rol|\n if rol.nombre != 'Coordinador' || asig_func != nil then\n @roles << rol\n end\n end\n if unica(@asignacion_rol.usuario_id,@asignacion_rol.proyecto_id, @asignacion_rol.rol_id) == true\n\t respond_to do |format|\n\t if @asignacion_rol.save\n sesion= Sesion.find_by(usuario_id: current_usuario.id, fechaFin: nil)\n Transaccion.create!(\n \t\t descripcion: \"Creación asociación rol #{@asignacion_rol.rol.nombre} al usuario #{@asignacion_rol.usuario.nombreUsuario} del proyecto #{@asignacion_rol.proyecto.nombre} : actual = #{ t @asignacion_rol.esActual.to_s}\",\n \t\t sesion_id: sesion.id ,\n \t\t proyecto_id: @asignacion_rol.proyecto.id)\n\t\tformat.html {redirect_to :controller => 'asignacion_roles', :action => 'index',:proyecto_id => @asignacion_rol.proyecto.id } \n\t\tformat.json { render :show, status: :created, location: @asignacion_rol }\n\t else\n params[:usuario_id] = @asignacion_rol.usuario_id\n \t\tformat.html { render :new }\n \t\tformat.json { render json: @asignacion_rol.errors, status: :unprocessable_entity }\n\t end\n\t end\n else\n\t respond_to do |format|\n\t\t format.html { redirect_to :controller => 'asignacion_roles', :action => 'index', :proyecto_id => @asignacion_rol.proyecto.id\n\t\t flash[:danger] = 'El usuario ya se encuentra asignado' } \n\t end\n end\n end",
"def index\n\n @generate_role_links = true\n \n session[:return_to] = {:controller => 'tracker', :action => 'index'}\n flash['notice'] = flash['notice']\n # see if we have a database\n if ! DbCheck.exist?\n render( :action => 'not_configured')\n return\n end\n \n if @logged_in_user && @logged_in_user.active_role\n case @logged_in_user.active_role.name\n when \"Designer\"\n designer_home_setup\n render( :action => 'designer_home' )\n when \"FIR\"\n fir_home_setup\n render( :action => 'fir_home' )\n when \"Reviewer\"\n reviewer_home_setup\n render( :action => 'reviewer_home' )\n when \"Manager\", \"Admin\"\n manager_home_setup\n render 'manager_home'\n when \"PCB Admin\"\n pcb_admin_home_setup\n render( :action => 'pcb_admin_home' )\n when \"Basic User\"\n manager_home_setup\n render( :action => 'basic_user_home')\n else\n reviewer_home_setup\n render( :action => 'reviewer_home' )\n end\n else\n # No user is identified.\n @pcbas = PartNum.get_active_pcbas\n @designs = Design.get_active_designs\n #@designs.delete_if { |d| d.pcb_number }\n @designs = @designs.sort_by { |d| d.pcbas_string }\n \n end\n \n session[:return_to] = {:controller => 'tracker', :action => 'index'}\n\n end",
"def filter_roles\n if params[:user] && params[:user][:roles]\n params[:user][:roles] = params[:user][:roles].map(&:to_i) & current_user.assignable_roles\n end\n end",
"def check_role_update\n unless current_user.is_admin?\n params[:user][:is_admin] = \"0\"\n params[:user][:is_moderator] = \"0\"\n params[:user][:is_sales] = \"0\"\n end\n end",
"def select_user\n authorize_only (:patient) {params[:useredit] == @user.wedgetail} # everyone can only edit themselves\n authorize :admin # apart from admin\n @useredit=User.find_by_wedgetail(params[:useredit])\n @listname=\"greylist\"\n @listname=\"blacklist\" if @useredit.access==2\n @listname=\"whitelist\" if @useredit.access==3\n @choice=User.find_by_wedgetail(params[:wedgetail])\n @ok=Firewall.find(:all,:conditions=>[\"patient_wedgetail=? and user_wedgetail=?\",params[:useredit],params[:wedgetail]])\n if @ok.size>0\n # remove selected\n Firewall.delete_all([\"patient_wedgetail=? and user_wedgetail=?\",params[:useredit],params[:wedgetail]])\n @new_term=\"Add\"\n @choice_name=@choice.family_name_given_names\n else\n # add seleted\n @ok=Firewall.new\n @ok.user_wedgetail=params[:wedgetail]\n @ok.patient_wedgetail=params[:useredit]\n @ok.save\n @new_term=\"Remove\"\n @choice_name=\"<font color='red'>\"+ @choice.family_name_given_names + \"</font>\"\n end\n \n @currentlist=User.find(:all,:conditions=>[\"firewalls.patient_wedgetail='#{@useredit.wedgetail}'\"],:joins=>\"inner join firewalls on users.wedgetail=firewalls.user_wedgetail\")\n \n render :update do |page|\n #page.replace_html \"old_wedge_\"+params[:wedgetail],new_wedgetail\n page.replace_html(\"command_\"+params[:wedgetail],link_to_remote(@new_term, :url => {:action => \"select_user\",:wedgetail=> params[:wedgetail],:useredit=>params[:useredit]}))\n page.replace_html(\"name_\"+params[:wedgetail],@choice_name)\n page.replace_html(\"greylist\",render(:partial => \"firewall_current\"))\n end\n end",
"def index\n if((current_user!=nil && current_user.rol!=\"messenger\"))\n if (current_user!=nil && current_user.rol==\"admin\")\n @user=User.find(params[:id])\n elsif(current_user!=nil && current_user.rol==\"regular\")\n @user=current_user\n end\n @locations = @user.locations\n \n else\n redirect_to root_path\n end\n end",
"def set_menu\n @menu = current_user.restaurant.menus.find(params[:id])\n rescue\n # If the menu is not part of the current_user.restaurant.menus, then we don't have access\n # ActiveRecord will throw an exception so we will rescue it immediately\n render body: nil, status: :forbidden\n end",
"def check_role\n redirect_to(root_url) unless check_role?(\"News Editor\") or check_role?(\"Site Admin\")\n end",
"def check_role\n redirect_to(root_url) unless check_role?(\"News Editor\") or check_role?(\"Site Admin\")\n end",
"def set_core_user_role\n @core_user_role = Core::UserRole.find_by_id(params[:id])\n redirect_to(action: \"index\") and redirect_to if @core_user_role.blank?\n end",
"def select_view\n #redirect_to :action => params[:selection]\n if (SELECTABLE_ACTIONS_ADMIN.include? params[:selection].to_sym) || (SELECTABLE_ACTIONS.include? params[:selection].to_sym)\n redirect_to :action => params[:selection]\n else\n redirect_to :action => 'home'\n end\n end",
"def create\n @role_menu = Security::RoleMenu.new\n @role_menu.security_role_id = params[:id_role]\n @role_menu.security_menu_id = params[:id_menu]\n respond_to do |format|\n if @role_menu.save\n format.html { redirect_to @role_menu, notice: 'Menú creado exitosamente para este rol.' }\n format.json { render :show, status: :created, location: @role_menu }\n format.js {} \n else\n format.html { render :new }\n format.json { render json: @role_menu.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_user\n #begin\n @user = User.find(params[:id])\n if current_user.role_id == 1\n @is_admin = 1\n end\n #rescue ActiveRecord::RecordNotFound => e\n # redirect_to sucampusenv_home_path, notice: \"Oh Ohh!!! You can only see your own profile page.\"\n #end\n end",
"def create\n \n if request.get?\n @role = Role.new\n else\n @role = Role.new(params[:role])\n\n # assign parent role\n if not params[:role][:parent].to_s.empty?\n @role.parent = Role.find(params[:role][:parent].to_i)\n end\n\n if @role.save\n # set the roles's static permissions to the static permission from the parameters \n params[:role][:static_permissions] = [] if params[:role][:static_permissions].nil?\n @role.static_permissions = params[:role][:static_permissions].collect { |i| StaticPermission.find(i) }\n\n # the above should be successful if we reach here; otherwise we \n # have an exception and reach the rescue block below\n flash[:success] = 'Role has been created successfully.'\n redirect_to :action => 'show', :id => @role.id\n else\n render :action => 'create'\n end\n end\n \n rescue ActiveRecord::RecordNotFound\n flash[:error] = 'You sent an invalid request.'\n redirect_to :action => 'list'\n end",
"def authorize_admin\n redirect_to :login unless current_user.permission.manage_app ||\n current_user.permission.manage_attrs ||\n current_user.permission.manage_achievement_categories ||\n current_user.permission.manage_talent_trees ||\n current_user.permission.manage_talents ||\n current_user.permission.manage_quests ||\n current_user.permission.manage_skills ||\n current_user.permission.manage_achievements ||\n current_user.permission.manage_items ||\n current_user.permission.manage_titles\n end",
"def index\n @meals = Menu.meals\n @locations = Menu.locations\n @menus = Menu.all\n params[:meal] ? @selected_meal = params[:meal] : @selected_meal = session[:selected_meal]\n params[:location] ? @selected_location = params[:location] : @selected_location = session[:selected_location]\n @current_items = []\n if current_user\n @menus.each do |menu|\n @current_items.push(menu.items.select{ |item| item.users.include? (current_user)}.flatten)\n end\n end\n if params[:meal] and params[:location]\n session[:selected_meal] = params[:meal]\n session[:selected_location] = params[:location]\n menu = Menu.where(:meal => params[:meal], :location => params[:location]).first\n if not menu.nil?\n redirect_to menu_path(menu.id) and return\n end\n end\n end",
"def index\n @rolesInfo= Role.all.to_a\n @users_a = self.user_to_ar\n @title = self.comm\n # @users = User.order(\"id desc\")\n #分頁\n StoreArea.all.each do |one|\n one.state = 'Y'\n one.save\n # if ur.vip_access.nil?\n # ur.vip_access = 'normal'\n # ur.save\n # end\n end\n @flag = params[:state]\n di = self.user_vip_access\n if @flag=='N'\n\n @users = User.vip_access(user_vip_access , session).stoped.order(sort_column + \" \" + sort_direction).page params[:page]\n\n else\n\n # @users = User.live.where.not(:id=> di ).order(:id).page params[:page]\n @users = User.vip_access(user_vip_access , session).live.order(sort_column + \" \" + sort_direction).page params[:page]\n @flag='Y'\n end\n\n\n #User.order(:name).page params[:page]\n @trades = Trade.sorted\n @normal_users = User.where(:vip_access =>'normal')\n\n # 100.times do |uur|\n # User.create!( :username => \"user#{uur}\",\n # :role_id=> 1 ,\n # :re_password => \"user#{uur}\" ,\n # :vip_access => \"normal\" ,\n # :name =>\"user#{uur}\" ,\n # :state=>'Y' ,\n # :password=>'8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918' ,\n # :email =>\"user#{uur}@yahoo.com.tw\" ,\n # :prompt=>'admin123' )\n # end\n\n\n end",
"def index\n\t\tif signed_in?\n \t\t user=User.find(session[:user_id])\n \t\tcase user.role\n \twhen \"s\"\n \t\tredirect_to supervisor_index_path and return\n \twhen \"a\"\n \t redirect_to receptionnist_index_path and return\n \tend\t\n \tend\n\tend",
"def check_roles\n if current_user.roles.include?(Role['owner'])\n return true\n elsif request.format.symbol == :json && params[:action] == 'index'\n return true\n end\n\n redirect_to root_path\n end",
"def create\n @mensagem = Mensagem.new(mensagem_params)\n \n if session[:admin_id]\n @mensagem.user_id = session[:admin_id]\n @mensagem.all = true\n @mensagem.email = nil\n elsif session[:location] \n @mensagem.user_id = current_user.id\n @mensagem.all = false\n end\n if @mensagem.all\n get_all_mensagem('sistema')\n else \n get_all_mensagem\n end \n \n if @mensagem.save \n if session[:location]\n redirect_to \"/#{session[:local_name]}/chats\" \n else\n render 'mensagems/admin/admin' \n end\n else\n render :new \n end\n \n end",
"def show\n if current_user.rol == ADMIN || current_user.rol == OPERARIO || current_user.rol == RESTAURANTE\n else\n redirect_to '/items'\n end\n end",
"def create_menu\n Rails.logger.warn params.to_s\n menu = Menu.create\n menu.published = false\n if !params[:odesk_id].blank?\n odesk = Odesk.where(access_token:params[:odesk_id]).first\n menu.odesk = odesk\n else\n restaurant = Restaurant.find(params[:restaurant_id])\n menu.restaurant = restaurant\n end\n menu.save\n render json: {id:menu.id}.as_json\n end",
"def index\r\n \r\n\r\n if params[:sivic_celula_id]\r\n if current_user.role == 'ADMINISTRADOR'\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_igreja_id: current_user.sivic_pessoa.sivic_igreja_id, id: params[:sivic_celula_id]}).paginate(:page => params[:page], :per_page => 10)\r\n end\r\n else\r\n if current_user.role == 'LIDER_DE_CELULAS'\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_pessoa_id: current_user.sivic_pessoa.id}).paginate(:page => params[:page], :per_page => 10)\r\n else\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_igreja_id: current_user.sivic_pessoa.sivic_igreja_id}).paginate(:page => params[:page], :per_page => 10)\r\n end\r\n end\r\n end",
"def update_roles\r\n self.roles.create(:title => \"admin\")\r\n if self.name.eql? \"Grandor Eldoran\"\r\n self.roles.create(:title => \"admin\")\r\n elsif self.name.eql? \"Test2 Test2\"\r\n self.roles.create(:title => \"member\")\r\n end\r\n end",
"def role_params\n {}\n end",
"def update\n @user = User.find(params[:user_id])\n @roles = Role.all\n \n if current_user.is_admin?\n @user.roles.clear\n @roles.each do |role|\n if (params[:role][:role][role.rolename][:hasrole].to_s == 1.to_s)\n @user.roles << role\n end\n end\n else\n @roles.each do |role|\n if !role.admin_only\n if @user.has_role?(role.rolename)\n @user.roles.destroy(role)\n end\n if (params[:role][:role][role.rolename][:hasrole].to_s == 1.to_s)\n @user.roles << role\n end\n end\n end\n end\n \n flash[:notice] = I18n.t(\"user.success.roles_updated\")\n reload_page\n \n end",
"def show\n redirect_to :back#:actina => \"after_log_in_prosess\" :action => \"chouse\"\n end",
"def index\n\n @users_a = self.user_to_ar\n\n # Role.all.each do |re|\n # re.state = 'Y'\n # re.save\n # end\n\n # @roles = Role.page params[:page]\n @flag = params[:state]\n if @flag=='N'\n @roles = Role.stoped.order(sort_column + \" \" + sort_direction).page params[:page]\n\n else\n @roles = Role.live.order(sort_column + \" \" + sort_direction).page params[:page]\n @flag='Y'\n end\n end",
"def make_admin\n authorize! @user\n @user.roles = @user.roles + ['admin']\n @user.save\n redirect_to @user, notice: t('user.made_admin', name: @user.username)\n end",
"def index\n @position_exist = Login.current_login.staff.position\n if @position_exist \n #@lesson_plans = LessonPlan.find(:all, :order => \"lecturer ASC, lecture_date DESC\")\n @lesson_plans = LessonPlan.search(params[:search])\n end\n current_roles = Role.find(:all, :joins=>:logins, :conditions=>['logins.id=?', Login.current_login.id]).map(&:authname)\n @is_admin=true if current_roles.include?(\"administration\") || current_roles.include?(\"lesson_plans_module_admin\")|| current_roles.include?(\"lesson_plans_module_viewer\")|| current_roles.include?(\"lesson_plans_module_user\")\n respond_to do |format|\n if @position_exist\n format.html # index.html.erb\n format.xml { render :xml => @lesson_plans }\n else\n format.html { redirect_to \"/home\", :notice =>t('position_required')+t('lesson_plan.title')}\n format.xml\n end\n end\n end",
"def admin_user\n redirect_to(root_url) unless logged_in?&¤t_user.user_role\n end",
"def create\n#\tlista za izbor role se prikazuje samo ako je logovan administrator\n\tif params[:user][:role_id]\n\t\t@role = Role.find(params[:user][:role_id])\n\telse\n\t\t@role = Role.find_by(name: 'registered user');\n\tend\n\t@user = @role.users.build(user_params)\n\t\n respond_to do |format|\n if @user.save\n\t\t \n\t\tset_session_for_user(@user)\n\t\t \n format.html { \n\t\t\tredirect_to addresses_url and return if session[:redirect_to_address]\n\t\t\tredirect_to @user, alert: \"User #{@user.name} was successfully created.\" \n\t\t}\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n\t\n end",
"def set_role_menu\n @role_menu = RoleMenu.find(params[:id])\n end",
"def show\n if current_user.rol == ADMIN || current_user.rol == OPERARIO\n else\n redirect_to '/items'\n end\n end",
"def index\n @catch = params[:protocol_role]\n\n if params[:protocol_role].blank?\n @protocol_roles = ProtocolRole.where(\"user_id = \"+current_user.id.to_s)\n else\n if !params[:protocol_role][:user_id].blank? && params[:protocol_role][:protocol_id].blank?\n @protocol_roles = ProtocolRole.where(\"user_id in (?)\", params[:protocol_role][:user_id]).all \n elsif params[:protocol_role][:user_id].blank? && !params[:protocol_role][:protocol_id].blank?\n @protocol_roles = ProtocolRole.where(\"protocol_id in (?)\", params[:protocol_role][:protocol_id]).all\n elsif !params[:protocol_role][:user_id].blank? && !params[:protocol_role][:protocol_id].blank?\n @protocol_roles = ProtocolRole.where(\"user_id in (?) and protocol_id in (?)\", params[:protocol_role][:user_id], params[:protocol_role][:protocol_id]).all \n end\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @protocol_roles }\n end\n end",
"def index\n redirect_to disabled_path\n # unless logged_in?\n # redirect_to tour_path\n # else\n # @current_item = \"my_hsa\"\n # end\n end",
"def index\n session[:current_room] = nil\n session[:current_user_id] = nil\n puts session[:current_user_id]\n if params[:user]\n @room = Room.find_by(rando: params[:room])\n @user = User.find_by(username: params[:user])\n begin\n @permissions = Permission.where( {user_id: @user.id, room_id: @room.id} )\n if @permissions.count > 0 then\n puts 'yes'\n session[:current_user_id] = @user.username\n session[:current_room] = @room.rando\n redirect_to @room\n end\n rescue\n if @room.nil? then\n puts 'invalid room'\n end\n end\n \n end\n end",
"def edit_roles\n if (@user = find_user(params[:id]))\n begin\n User.transaction(@user) do\n \n roles = params[:user][:roles].collect { |role_id| Role.find(role_id) }\n # add any new roles & remove any missing roles\n roles.each { |role| @user.roles << role if !@user.roles.include?(role) }\n @user.roles.each { |role| @user.roles.delete(role) if !roles.include?(role) }\n\n @user.save\n flash[:notice] = \"Roles updated for user '#{@user.login}'.\"\n end\n rescue\n flash[:warning] = 'Roles could not be edited at this time. Please retry.'\n ensure\n redirect_to :back\n end\n else\n redirect_back_or_default :action => 'list'\n end\n end",
"def index\n @groups = Group.all\n @mentors = Mentor.all\n @profiles = Profile.all\n @home_url = authenticated_root_path\n @profile_url = \"#{current_user.role}/#{current_user.id}\" \n @group_url = \"/groups/#{@groups.name}\"\n @page_title = current_user.role.capitalize\n end",
"def index\n respond_to do |format|\n if signed_in?\n if current_user.roles.any?\n flash.keep\n format.html { redirect_to welcome_data_entry_menu_path }\n else\n flash.keep\n format.html { redirect_to user_registrants_path(current_user) }\n end\n else\n format.html { redirect_to root_path }\n end\n end\n end",
"def index\n @meals = Menu.meals\n @locations = Menu.locations\n @menus = Menu.all\n params[:meal] ? @selected_meal = params[:meal] : @selected_meal = \"\"\n params[:location] ? @selected_location = params[:location] : @selected_location = \"\"\n #session.clear\n # if params[:meal] && params[:location]\n # #session[:selected_meal] = params[:meal]\n # #session[:selected_location] = params[:location]\n # @selected_meal = params[:meal]\n # @selected_location = params[:location]\n # #elsif session[:selected_meal] && session[:selected_location]\n # #@selected_meal = session[:selected_meal]\n # #@selected_location = session[:selected_location]\n # end\n\n if @selected_meal and @selected_location\n menu = Menu.where(:meal => @selected_meal, :location => @selected_location).first\n if not menu.nil?\n redirect_to menu_path(menu.id) and return\n end\n end\n end",
"def show\n\t\tparamsr\n\t\tglobal\n\t\t@menu_ids=[]\n\t\t@role=@roles.find(@id)\n\t\t# render json: @role, each_serializer: RoleSerializer, root: false\n\t\t# respon_to do |format|\n\t\t# \tformat.html\n\t\t# \tformat.json { render json: @role }\n\t\t# end\n\n\t\t@menu_id=MenusRole.where(role_id: @id)\n\t\t@menu_id.all.each do |m| @menu_ids.push(m.menu_id) end\n\t\t@menu=Menu.where(id: @menu_ids)\n\t\t\tif @role.nil?\n\t\t\t\tredirect_to \"/v2/hotels/#{@hotel_id}/roles/\"\n\t\t\tend\n\tend",
"def activar\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_activar = false\n @security_role_type = Security::RoleType.find_by(name: \"Activar\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_activar = true\n break\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_activar = true\n end\n return @permiso_activar\n end\n end\n end",
"def authorize\n if(session[:userName]!='nil')\n unless User.find_by_userName(session[:userName])\n redirect_to(:controller => \"logins\" , :action => \"index\" )\n #if(params[:name]!='nil')\n #redirect_to(:controller => \"logins\", :action => \"index\")\n end\n end\n end",
"def level_to_agent\r\n Admin.to_agent params[:id]\r\n redirect_to :action => 'show_agents' \r\n end",
"def verify_admin\n redirect_to root_url unless current_user.role_id == 1 || current_user.role_id == 2\n end",
"def permiso_reagendar\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_reagendar = false\n @security_role_type = Security::RoleType.find_by(name: \"Reagendar\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_reagendar = true\n break\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_reagendar = true\n end\n if params[:action] == \"reagendar\" && @permiso_reagendar == false\n redirect_to root_path\n end\n return @permiso_reagendar\n end\n end\n end",
"def index_redirect(**opt, &blk)\n opt[:user] = find_user(opt[:user] || current_user || @user)\n opt[:dst] = opt[:user]&.administrator? ? :list_all : :show\n super(**opt, &blk)\n end",
"def show\n @page=Domain.find params[:id]\n @p=@page.role\n case @p\n when 0\n @page.update(:role=>1)\n when 1\n @page.update(:role=>0)\n end\n redirect_to :action=>\"index\"\n\n end",
"def firewall\n @useredit=User.find_by_wedgetail(params[:wedgetail])\n authorize_only (:patient) {@useredit.wedgetail == @user.wedgetail} # everyone can only edit themselves\n authorize :admin # apart from admin\n @currentlist=User.find(:all,:conditions=>[\"firewalls.patient_wedgetail='#{@useredit.wedgetail}'\"],:joins=>\"inner join firewalls on users.wedgetail=firewalls.user_wedgetail\")\n if ! params.has_key? :show or params[:show]==\"Team\"\n @search_type=\"Team\"\n @next_search=\"Individual\"\n @allusers=User.find(:all,:conditions=>[\"role=6\"])\n else\n @search_type=\"Individual\"\n @next_search=\"Team\"\n @allusers=User.find(:all,:conditions=>[\"role=3 or role=4\"])\n end\n @listname=\"greylist\"\n @listname=\"blacklist\" if @useredit.access==2\n @listname=\"whitelist\" if @useredit.access==3\n end",
"def lista_users\n if params[:type_of].to_i == 6\n @role_users = RolesUser.find(:all, :joins => :user, :order => 'login ASC')\n else\n if params[:type_of].to_i == 1\n @role_users = RolesUser.find(:all, :joins => :user, :conditions => [\"users.unidade_id = ?\", current_user.unidade_id],:order => 'login ASC')\n end\n end\n\n render :update do |page|\n page.replace_html 'users', :partial => \"users\"\n end\n\n end",
"def save_login_state\n\n if session[:user_id] && session[:rol_id]\n @current_rol =Rol.find session[:rol_id]\n if(@current_rol[:Tipo]==\"A\")\n redirect_to(:controller => 'web',:action => 'administrador')\n elsif(@current_rol[:Tipo]==\"D\")\n redirect_to(:controller => 'web',:action => 'doctor')\n else\n redirect_to(:controller => 'web',:action => 'usuario')\n end\n return false\n else\n return true\n end\n end",
"def authorize_as_admin\n unless session[:administrator]\n redirect_to(:controller => \"portfolio\", :action => \"default\", :id => session[:username])\n end\n end",
"def create\n puts \"===#{params.dup}\"\n options = params.dup\n\toptions[:organization_user][:status] = OrganizationUser::INITIAL\n\tuser = User.find_by_id(options[:organization_user][:user_id])\n\tif user.nil?\n\t\tflash[:error] = \"You must pick a user to become an admin.\"\n\t\tredirect_to \"/organization_users/new\"\n puts \"after redirect\"\n\t\treturn false\n\tend \n @organization_user = OrganizationUser.new(options[:organization_user]) \n respond_to do |format|\n if @organization_user.save\n User.find(options[:organization_user][:user_id]).roles << Role.find_by_name('OrganizationUser')\n flash[:notice] = 'Organization User was successfully created.'\n format.html { redirect_to \"/organization_users\" }\n format.xml { render :xml => @organization_user, :status => :created, :location => @organization_user }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @organization_user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @show_element=\"admin\"\n @action=\"create2\"\n @rolesgroup = RolesGroup.new\n # set role_name for role select list\n @rolesgroup.role_name = params[:role_name]\n @role = Role.find_by_role_name(params[:role_name])\n end",
"def index\n # check permisison\n authorize! :enabled_menu_index, Vehicle.new\n page = params[:page] || 1\n # See if the API passed a desired code\n country_id_desired =\n if params[:country_code].present?\n desired_country = Country.find_by_code(params[:country_code].upcase)\n desired_country.present? ? desired_country.id : nil\n # ...otherwise load the employee's operating country id\n else\n current_employee.operating_country_id\n end\n\n \n\n # desired auction date or empty string (which is any auction date)\n auction_date_desired = params[:auction_date] || cookies[:auction_date_filter] || \"\"\n\n # desired facility or nil (which is any facility)\n facility_desired = params[:facility_filter_id] || cookies[:facility_filter_id]\n\n # desired seller or nil (which is any seller)\n seller_desired = params[:seller_filter_id] || cookies[:seller_filter_id]\n\n # desired member or nil (which is any member)\n member_desired = params[:member_filter_id] || cookies[:member_filter_id]\n\n # any column not defined in our model is equivalent to \"id ASC\"\n sort_desired = params[:column_as_string] || cookies[:lot_sort_by_column] || \"\"\n\n # the order is ASC unless this parameter specifies \"desc\"\n sort_order = params[:sort_order] || cookies[:lot_sort_by_order] || \"\"\n\n # any state not defined in our AASM is equivalent to .all\n # this one is also a rails level html variable so we may \n # more easily conditionally include partials\n @state_desired = params[:state_as_string] || \"\"\n\n # yard_num is currently (02/04/14) for Copart's API, our UI\n # only uses yard_filter_id. If both are present yard_num is \n # used instead of yard id.\n yard_id_desired = params[:yard_filter_id] || cookies[:yard_filter_id] # normal case\n if params[:yard_num].present?\n desired_yard = Yard.find_by_yard_num(params[:yard_num])\n yard_id_desired = desired_yard.id if desired_yard.present?\n end\n # ... and let's make sure the yard_id is one the employee has access to\n #managing_yards = Yard.for_employee(current_employee)\n #yard_id_desired = nil if !managing_yards.detect { |x| x[:id] == yard_id_desired.to_i }\n\n # Get yards from current employee and find out if yard is accessible or not\n if current_employee.present? && current_employee.yards.present? && yard_id_desired.present?\n yard_id_desired = nil if !current_employee.yards.map(&:id).include?(yard_id_desired.to_i)\n end\n\n # The auction page has a special default sort order, so\n # if we're looking at the ready for auction lots main page\n # and don't have other filters requested we'll apply it\n should_apply_auction_order = (@state_desired == \"ready_for_auction\") &&\n sort_desired.empty? 
&&\n auction_date_desired.empty?\n @vehicles = Vehicle.for_employee(current_employee)\n .state(@state_desired)\n .ordered_by(sort_desired, sort_order, should_apply_auction_order, params[:state_as_string])\n .order_for_auction_page(should_apply_auction_order)\n .seller(seller_desired)\n .member(member_desired)\n .facility(facility_desired)\n .auction_date(auction_date_desired)\n .yard(yard_id_desired)\n .country(country_id_desired)\n .includes([:api_error, {model: :make}, {vehicle_type: :vehicle_type_group}]).group(\"vehicles.id\")\n # Only get these when loading the html page\n if request.format.html?\n gon.showMailInvoiceButtons = @state_desired == \"wtg_for_settlement_confirmation\"\n csv_lot_state = [\"awaiting_sale_docs\", \"ready_for_auction\", \"ready_for_seller_billing\", \n \"wtg_for_buyer_charge_confirmation\", \"wtg_to_clear_pickup\", \n \"wtg_for_driver_dispatch\", \"wtg_for_inventory\", \"wtg_for_sale_confirmation\",\n \"wtg_for_settlement_confirmation\", \"wtg_for_trip_confirmation\", \n \"wtg_for_title\", \"\"].include?(@state_desired)\n\n if csv_lot_state\n gon.showCsvButton = current_employee.can_enable_field?(\"action_export_to_csv\")\n gon.showSellerCsvButton = current_employee.can_enable_field?(\"action_export_to_seller_csv\")\n gon.showMemberCsvButton = current_employee.can_enable_field?(\"action_export_to_member_csv\")\n end\n\n create_country_specific_gon_variables if request.format.html?\n\n gon.sortOptionAscending = Vehicle::SORT_ASCENDING\n gon.sortOptionDescending = Vehicle::SORT_DESCENDING\n employee_country = current_employee.present? ? current_employee.operating_country : nil\n if @state_desired == \"wtg_for_driver_dispatch\"\n # the form path for the subhaluer (tow provider) mass assign function\n gon.assignTowProviderFormPath = assign_tow_provider_vehicles_path\n elsif @state_desired == \"ready_for_auction\"\n gon.showAuctionDateFilter = true\n gon.auctionLotsTitle = I18n.t(\"views.vehicle.index.auction_lots\") \n current_employee_yards = current_employee.yards.includes(facility: :address).select{|ya| ya.facility.address.country_id.to_s == employee_country.id.to_s }\n gon.auctionDates = AuctionDatum.current_future_datum.for_yards(current_employee_yards).map(&:auction_date).uniq\n end\n\n end\n\n # Filter lot query based on state\n gon.vehicles = tuneup_lots_based_on_state(@vehicles, page)\n gon.parameters = { \"auction_date\" => auction_date_desired,\n \"column_as_string\" => sort_desired,\n \"country_filter_id\" => country_id_desired,\n \"facility_filter_id\" => facility_desired,\n \"seller_filter_id\" => seller_desired, \n \"sort_order\" => sort_order,\n \"member_filter_id\" => member_desired,\n \"state_as_string\" => @state_desired,\n \"yard_filter_id\" => yard_id_desired }\n\n gon.pagination = { current_page: @vehicles.current_page,\n default_per_page: @vehicles.default_per_page,\n per_page: @vehicles.total_count % @vehicles.default_per_page,\n total_pages: @vehicles.total_pages,\n total_count: @vehicles.total_count }\n\n gon.httpSlug = \"\" # To allow for dynamic http path building in vehicles.js.coffee\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: { vehicles: gon.vehicles, \n parameters: gon.parameters, \n pagination: gon.pagination,\n auctionDates: gon.auctionDates } }\n end\n end",
"def admin_permission\n if session[:position].to_s == \"Secretary\" or\n session[:position].to_s == \"Treasurer\" or\n session[:position].to_s == \"Chairman\"\n flash[:notice] = \"RESTRICTED: you do not have access\"\n redirect_to controller: :access, action: :admin_menu, :id => session[:user_id],\n position: session[:position]\n return false\n end\n\n end",
"def authorize\n @logged_in_user = User.find(session[:user_id])\n rescue\n reset_session\n @logged_in_user = nil\n if User.find(:all).length > 0\n session[:jumpto] = request.parameters\n redirect_to :controller => 'authentication', :action => 'login' and return false\n else\n redirect_to :controller => 'authentication', :action => 'create_admin' and return false\n end\n end",
"def index\n if (!user_signed_in? or !(@@institucion_cultural_roles.concat(@@sup_inc_roles).concat(@@exp_inc_roles).include?(current_user.role)) )\n respond_to do |format|\n format.html { redirect_to(new_user_session_path) }\n end\n return\n end\n \n @title_view = 'Cuidado y Mantenimiento'\n\n if (user_signed_in? && (@@sup_inc_roles.include?(current_user.role)))#INC Sup\n @cultural_heritage_care_and_maintenances =\n CulturalHeritage::CareAndMaintenance.find(:all,:conditions=>[\"cultural_heritage_care_and_maintenances.status = ?\",0])\n elsif (user_signed_in? && (@@exp_inc_roles.include?(current_user.role)))#INC Exp\n @cultural_heritage_care_and_maintenances =\n CulturalHeritage::CareAndMaintenance.find(:all,:conditions=>[\"cultural_heritage_care_and_maintenances.expert = ? AND cultural_heritage_care_and_maintenances.status = ?\",current_user.id,0])\n elsif (user_signed_in? && (@@institucion_cultural_roles.include?(current_user.role)))#MIC\n @cultural_heritage_care_and_maintenances =\n CulturalHeritage::CareAndMaintenance.find(:all,:conditions=>[\"cultural_heritage_care_and_maintenances.responsible = ?\",current_user.id])\n else\n @cultural_heritage_care_and_maintenances = []\n end\n# @cultural_heritage_care_and_maintenances = CulturalHeritage::CareAndMaintenance.all\n\n @lista_heritage_select=CulturalHeritage::CulturalHeritage.all\n @lista_institution_select=CulturalHeritage::CulturalInstitution.all\n @security_person=Security::Person.all\n @care_and_maintenance_experts=User.find(:all,:conditions=>[\"role = ?\",@@exp_inc_roles])\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @cultural_heritage_care_and_maintenances }\n end\n end",
"def test_set_role\n\n admin_session = cathy_admin_session\n \n pcb_input_gate = Role.find_by_name('PCB Input Gate')\n tracker_admin = Role.find_by_name('Admin')\n \n post(:set_role, { :id => pcb_input_gate.id }, admin_session)\n session_user = User.find(session[:user_id])\n assert_equal(pcb_input_gate.name, session_user.active_role.name)\n assert_redirected_to(:controller => 'tracker')\n\n post(:set_role, { :id => tracker_admin.id }, admin_session)\n session_user.reload\n assert_equal(tracker_admin.name, session_user.active_role.name)\n assert_redirected_to(:controller => 'tracker')\n\n end",
"def in\n @var = Users.find_users\n @salout = \"Wrong username or password\"\n if cookies[ :user_id ] != nil\n redirect_to :action => \"index\" ,:controller => \"homepage\"\n else\n @var.each do |v|\n if ( v.username == params[ :username ] )\n if( v.encrypted_password == params[ :password ])\n cookies[ :user_id ] = v.id\n cookies[ :username ] = v.username\n #this if condition redirect the user after logging in to the right home page.\n #if the user is an admin redirects to admin homepage otherwise redirects to normal homepage.\n #Author: Rehab A.Elshahawy\n if(v.admin == true)\n redirect_to :action => \"index\" ,:controller => \"admin\"\n else\n redirect_to :action => \"index\" ,:controller => \"homepage\"\n return\n end\n end\n end\n end\n end\nend",
"def restrictToAdmin! ; redirect to('/login'),303 unless admin? ; end",
"def update_choice\n redirect_to (\"/admin/users/#{params[\"id\"]}/edit\")\n end",
"def setUserRole\n if !session[:user_id]\n flash[:notice] = \"Need to login first\"\n redirect_to :action=> 'login'\n end\n\n roomname = params[:setuserrole][\"roomname\"]\n collectionname = (params[:setuserrole][\"collectionname\"].length ==0)? nil:params[:setuserrole][\"collectionname\"]\n nodename = (params[:setuserrole][\"nodename\"].length==0)? nil:params[:setuserrole][\"nodename\"]\n userid = params[:setuserrole][\"userid\"]\n role = params[:setuserrole][\"role\"]\n\n begin\n am = session[:am]\n acc = Account.find_by_username(session[:user_id])\n if(acc.nil?)\n flash[:notice] = \"Need to login first\"\n redirect_to :action=> 'login'\n return\n end\n am.keepalive(acc.username, acc.password)\n\n myroominfo = am.getRoomInfo(roomname)\n if(!myroominfo.nil? and myroominfo.isConnected == true)\n result = am.setUserRole(roomname, userid, role, collectionname, nodename)\n flash[:result] = \"setUserRole result success: \" + result + \" \" + acc.roomURL + \" \" + userid + \" \" + role\n else\n result = \"Room is shutdown, this feature only available when room is started.\"\n flash[:notice] = result\n end\n\n redirect_to :action => 'accountManager'\n rescue Exception => msg\n flash[:notice] = msg\n end\n\n end",
"def masquerade\n @user = User.find(params[:id])\n if @user\n switch_user(@user)\n redirect_to root_url\n else\n redirect_to :action => :list\n end\n end"
] | [
"0.65967965",
"0.6536144",
"0.63166076",
"0.620196",
"0.6177349",
"0.61541533",
"0.61504626",
"0.61400247",
"0.61336964",
"0.61085343",
"0.6066207",
"0.5982902",
"0.593843",
"0.59358656",
"0.590718",
"0.5895988",
"0.5876142",
"0.5870164",
"0.5868835",
"0.5863436",
"0.5840771",
"0.5832372",
"0.5821735",
"0.5813853",
"0.57972425",
"0.5785451",
"0.5780733",
"0.57695144",
"0.5761297",
"0.5760087",
"0.5759884",
"0.57554334",
"0.57450247",
"0.57411957",
"0.573304",
"0.5732432",
"0.5719473",
"0.5714984",
"0.5713877",
"0.5708289",
"0.5680315",
"0.56647295",
"0.5662447",
"0.5638952",
"0.5638952",
"0.56363666",
"0.5635973",
"0.5631467",
"0.56293654",
"0.5620422",
"0.5616835",
"0.5614737",
"0.56100374",
"0.56042194",
"0.55894846",
"0.5588155",
"0.55848897",
"0.5579035",
"0.55729747",
"0.5568629",
"0.55683434",
"0.55600214",
"0.55586094",
"0.55553794",
"0.55459803",
"0.5538568",
"0.55374104",
"0.5531346",
"0.5529042",
"0.552678",
"0.55237764",
"0.55216503",
"0.5514925",
"0.55123615",
"0.55093175",
"0.5508258",
"0.550733",
"0.5505701",
"0.55041516",
"0.549873",
"0.54986495",
"0.54950917",
"0.54916906",
"0.5489056",
"0.5486863",
"0.5483349",
"0.5479744",
"0.54718244",
"0.5466178",
"0.5457482",
"0.5457125",
"0.54538244",
"0.5452331",
"0.5450523",
"0.54498744",
"0.54438764",
"0.5442931",
"0.54372215",
"0.5434352",
"0.5432386",
"0.5426949"
] | 0.0 | -1 |
YOUR CODES DOWN HERE : INDEX : | def index
paramsr
global
# render json: @roles, each_serializer: RoleSerializer, root: false
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def lttfindex\n end",
"def ta_index\n end",
"def index(p0) end",
"def index(p0) end",
"def index ; @index ; end",
"def cursor_to_decision\n @index = 89\n end",
"def setting_index\n end",
"def index=(_arg0); end",
"def legionnairs\n\n end",
"def probers; end",
"def pos; end",
"def pos; end",
"def pos; end",
"def pos; end",
"def pos; end",
"def pos; end",
"def suivre; end",
"def index\n\t\t\n\tend",
"def index\n\t\t\n\tend",
"def index\n\t\t\n\tend",
"def index\n\t\t\n\tend",
"def index\n\t\t\n\tend",
"def index\n\n end",
"def pos\n end",
"def pos\n end",
"def pos\n end",
"def position; end",
"def position; end",
"def position; end",
"def position; end",
"def position; end",
"def position; end",
"def position; end",
"def position; end",
"def who_we_are\r\n end",
"def private; end",
"def index\t\t\n\tend",
"def eplore\n end",
"def terpene; end",
"def index\n \t\n end",
"def index\n \t\n end",
"def apply\n\t\t\t\t\n\t\t\tend",
"def verdi; end",
"def design\r\n \r\n end",
"def list\n index\n end",
"def schubert; end",
"def index\n \n end",
"def index\n @funcs = Func.all\n #↖︎データを受け渡している\n\n idx = 0\n @data = []\n 5.times do |i|\n idx += 1\n place = { \"name\" => \"名前\"}\n distance = 100\n @data[i] = \"#{idx+1}: #{place['name']}: #{distance}m \"\n end\n end",
"def index\n\t\tend",
"def index\r\n\r\n\tend",
"def each_child_index\n end",
"def operations; end",
"def operations; end",
"def intensifier; end",
"def pos() end",
"def pos() end",
"def pos() end",
"def pos() end",
"def buttons; end",
"def schumann; end",
"def order; end",
"def order; end",
"def codepoints()\n #This is a stub, used for indexing\n end",
"def pos()\n #This is a stub, used for indexing\n end",
"def explore(pos)\n end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index; end",
"def index\n\n\tend",
"def index\n\n\tend",
"def index\n\n\tend"
] | [
"0.65698695",
"0.6352658",
"0.6099765",
"0.6099765",
"0.6083747",
"0.59881765",
"0.59734017",
"0.59647906",
"0.5940142",
"0.58772826",
"0.58630717",
"0.58630717",
"0.58630717",
"0.58630717",
"0.58630717",
"0.58630717",
"0.58628696",
"0.58542246",
"0.58542246",
"0.58542246",
"0.58542246",
"0.58542246",
"0.5841846",
"0.58334434",
"0.58334434",
"0.58334434",
"0.58156335",
"0.58156335",
"0.58156335",
"0.58156335",
"0.58156335",
"0.58156335",
"0.58156335",
"0.58156335",
"0.5800046",
"0.57916635",
"0.5770206",
"0.57667005",
"0.5742316",
"0.57381535",
"0.57381535",
"0.57243836",
"0.5703984",
"0.57027686",
"0.56986696",
"0.56953204",
"0.5685442",
"0.56787294",
"0.56730616",
"0.566789",
"0.5666875",
"0.566171",
"0.566171",
"0.56533056",
"0.5643535",
"0.5643535",
"0.5643535",
"0.5643535",
"0.56354564",
"0.5633945",
"0.56292653",
"0.56292653",
"0.56289715",
"0.56266755",
"0.5625787",
"0.5625216",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.5624652",
"0.56215215",
"0.56215215",
"0.56215215"
] | 0.0 | -1 |
SHOW : 1. getting params DONE 2. filter params DONE 3. show roles DONE 4. show MENU DONE 5. redirect DONE 6. WAITING FOR EDIT | def show
paramsr
global
@menu_ids=[]
@role=@roles.find(@id)
# render json: @role, each_serializer: RoleSerializer, root: false
# respon_to do |format|
# format.html
# format.json { render json: @role }
# end
@menu_id=MenusRole.where(role_id: @id)
@menu_id.all.each do |m| @menu_ids.push(m.menu_id) end
@menu=Menu.where(id: @menu_ids)
if @role.nil?
redirect_to "/v2/hotels/#{@hotel_id}/roles/"
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n submenu_item 'role-index'\n @roles = Role.paginate_by_sql(\"select t1.role_id id,t3.role_name,t3.description from\n (select a1.role_id,count(*) permission_num from roles_permissions a1\n where a1.permission_id in (select permission_id from roles_permissions where role_id =#{@current_user.role_id})\n group by a1.role_id) t1,\n (select role_id,count(*) permission_num from roles_permissions where role_id > 1 group by role_id) t2,\n roles t3\n where t1.permission_num = t2.permission_num and t1.role_id = t2.role_id\n and t1.role_id = t3.id \",:page => params[:page], :per_page => 30)\n\n end",
"def show\n @lesson_plan = LessonPlan.find(params[:id])\n current_roles = Role.find(:all, :joins=>:logins, :conditions=>['logins.id=?', Login.current_login.id]).map(&:authname)\n @is_admin=true if current_roles.include?(\"administration\") || current_roles.include?(\"lesson_plans_module_admin\")|| current_roles.include?(\"lesson_plans_module_viewer\")|| current_roles.include?(\"lesson_plans_module_user\")\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @lesson_plan }\n end\n end",
"def admin_show\n @user = Admin.where(role_id: 1).order(name: :asc)\n add_breadcrumb 'utilisateurs', :parametre_admin_path\n add_breadcrumb 'administrateurs & décideurs', :parametre_admins_admin_show_path\n #render layout: 'fylo'\n render layout: 'views/index'\n end",
"def decideur_show\n @user = Admin.where(role_id: 2).order(name: :asc)\n add_breadcrumb 'utilisateur', parametre_admin_path\n add_breadcrumb 'decideurs', parametre_decideurs_path\n render layout: 'fylo'\n end",
"def info_for_edit_page\n @is_super_adm = is_super?\n\n if @is_super_adm\n # Loading Choosing of adm\n @admins = User.admins_list\n\n if @admins.empty?\n @mentors = [@admins]\n else\n employee = @user.client.employee\n if employee.present?\n @admins_cur = employee.employee_id\n @mentors_cur = @user.client.employee_id\n else\n @admins_cur = params[:administrator_id]\n @mentors_cur = 0\n end\n @mentors = User.mentors_list(@admins_cur, additional_users: User.all_local_admins)\n end\n elsif current_user.local_admin?\n @mentors = User.mentors_list(current_user.role_model.id, additional_users: [current_user])\n @mentors_cur = @user.client.employee_id\n end\n end",
"def show\n @sponsor = Program.find(params[:sponsor_id])\n if is_admin?(@sponsor)\n @role = Role.find(params[:id])\n @roles_users = @sponsor.roles_users.for_role(@role).all\n @all_users = User.order(:last_name).to_a\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @role }\n end\n else\n redirect_to(sponsor_path(params[:sponsor_id]))\n end\n end",
"def show\r\n find_record\r\n # before_show callback\r\n if (m = callback_method('before_show') )\r\n ret = call_callback_method(m)\r\n if ret.class == FalseClass\r\n @form['readonly'] = nil # must be\r\n flash[:error] ||= t('drgcms.not_authorized')\r\n return index\r\n end\r\n end \r\n\r\n render action: 'edit', layout: 'cms'\r\nend",
"def show\n if current_user.rol == ADMIN || current_user.rol == OPERARIO || current_user.rol == RESTAURANTE\n else\n redirect_to '/items'\n end\n end",
"def consultar\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_consultar = false\n @security_role_type = Security::RoleType.find_by(name: \"Consultar\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name || role_type.security_role_type.name == \"Pagar\"\n @permiso_consultar = true\n break\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_consultar = true\n end\n if params[:action] == \"show\" && @permiso_consultar == false\n redirect_to root_path\n end\n return @permiso_consultar\n end\n end\n end",
"def show\n @especificacion = Specification.where(:id => session[:specification_sel_id]).first \n @user = User.where(:username => @especificacion.user_id).first \n\nif current_user.acquisition? || current_user.import? || current_user.acquisition_analist? || current_user.import_analist? \n @mostrar_editar = true\n if @user.director? || @user.directorate? || @user.gsmp? || @user.acquisition? || @user.import? || @user.quality? || @user.manage? || @user.acquisition_analist? || @user.import_analist? || @user.quality_analist? || @user.manage_analist? \n\t@mostrar_eliminar = true\n else\n\t@mostrar_eliminar = false\n end\n else\n @mostrar_eliminar = false\n @mostrar_editar = false\n end\n\n if current_user.gsmp? || current_user.quality? || current_user.quality_analist? || current_user.manage? || current_user.manage_analist? || current_user.proy_responsible?\n\t@mostrar_descargar = false\n else\n\t@mostrar_descargar = true\n end\n\n @requisition = Requisition.find(params[:id])\n\n end",
"def show\n redirect_to root_path unless ViewMerchant.new(current_company_user, current_admin, current_merchant_user).check\n determine_user_role()\n load_permission_names()\n end",
"def show\n render layout: false\n @activite = Activite.find(params[:id])\n authorize @activite\n end",
"def show\n if current_user.rol == ADMIN || current_user.rol == OPERARIO\n else\n redirect_to '/items'\n end\n end",
"def show\n @role = session[:role]\n check_access(@role)\n # Show company name if realtor belongs to company\n @realtor = Realtor.find(params[:id])\n if @realtor.companies_id != nil\n @company = Company.find(@realtor.companies_id)\n end\n if session[:role] == 'admin'\n redirect_to login_path\n else\n user = User.find(session[:user_id])\n if user.is_househunter == true\n @switchable = true\n end\n end\n end",
"def show\n authorize RoleCutoff\n end",
"def show\n @roles_and_permission = @roles.roles_and_permission.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @roles_and_permission }\n end\n end",
"def show\n\t\trequire_admin!\n\tend",
"def show\n restrict 'allow only admins' or begin\n @admin = Admin.find_by_id(params[:id]) || current_user\n respond_to do |format|\n format.html # show.rhtml\n format.xml { render :xml => @admin.to_xml }\n end\n end\n end",
"def show\n restrict 'allow only admins' or begin\n @admin = Admin.find_by_id(params[:id]) || current_user\n respond_to do |format|\n format.html # show.rhtml\n format.xml { render :xml => @admin.to_xml }\n end\n end\n end",
"def show\n @admin = Admin.find(params[:id])\n @role = Role.all\n @had_role_ids = @admin.roles.map(&:id)\n\n end",
"def index\r\n \r\n\r\n if params[:sivic_celula_id]\r\n if current_user.role == 'ADMINISTRADOR'\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_igreja_id: current_user.sivic_pessoa.sivic_igreja_id, id: params[:sivic_celula_id]}).paginate(:page => params[:page], :per_page => 10)\r\n end\r\n else\r\n if current_user.role == 'LIDER_DE_CELULAS'\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_pessoa_id: current_user.sivic_pessoa.id}).paginate(:page => params[:page], :per_page => 10)\r\n else\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_igreja_id: current_user.sivic_pessoa.sivic_igreja_id}).paginate(:page => params[:page], :per_page => 10)\r\n end\r\n end\r\n end",
"def show\n if params[:admin].present? and (params[:admin] == 'true')\n render :show_admin\n else\n render :show\n end\n end",
"def show\n @especificacion = Specification.where(:id => session[:specification_sel_id]).first \n @user = User.where(:username => @especificacion.user_id).first \n\nif current_user.acquisition? || current_user.import? || current_user.acquisition_analist? || current_user.import_analist? \n @mostrar_editar = true\n if @user.director? || @user.directorate? || @user.gsmp? || @user.acquisition? || @user.import? || @user.quality? || @user.manage? || @user.acquisition_analist? || @user.import_analist? || @user.quality_analist? || @user.manage_analist? \n\t@mostrar_eliminar = true\n else\n\t@mostrar_eliminar = false\n end\n else\n @mostrar_eliminar = false\n @mostrar_editar = false\n end\n\n if current_user.gsmp? || current_user.quality? || current_user.quality_analist? || current_user.manage? || current_user.manage_analist? || current_user.proy_responsible?\n\t@mostrar_descargar = false\n else\n\t@mostrar_descargar = true\n end\n @servicerequest = Servicerequest.find(params[:id])\n\n end",
"def index\n if current_user.nil?\n show_empty\n elsif current_user.role.id == 1\n show_analist\n elsif current_user.role.id == 2\n show_qa_analist\n elsif current_user.role.id == 3\n show_planner\n elsif current_user.role.id == 5\n show_admin\n else\n show_empty\n end\n end",
"def show\n accion = params[:accion]\n render 'add' and return if accion == 'add'\n render 'remove' and return if accion == 'remove'\n end",
"def display_roles\n \n @active_roles = Role.find_all_active\n \n render(:layout => false)\n \n end",
"def show\n\tif(filters)\n\t\tif(!session[:user_id])\n\t\t\tflash[:error] = \"Acceso denegado\"\n\t\t\tredirect_to home_path\n\t\t\treturn\n\t\tend\n\t\tif(!User.find(session[:user_id]).admin || !User.find(session[:user_id]).active)\n\t\t\tflash[:error] = \"Acceso denegado\"\n\t\t\tredirect_to home_path\n\t\t\treturn\n\t\tend\n\tend\n @user = User.find(params[:id])\n\n\tflash[:active_tab] = \"admin\"\n\t\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @user }\n end\n end",
"def show\n session[:previous_url] = request.referer\n @company_name = Company.find(House.find(params[:id]).companies_id).name\n @houseid = params[:id]\n @role = session[:role]\n\n if @role ==\"househunter\"\n @househunter = Househunter.find_by(:users_id => session[:user_id])\n @interested_househunter = InterestedHousehunter.find_by(:house_id => params[:id], :househunter_id => @househunter.id)\n elsif @role == \"realtor\"\n @realtor = Realtor.find_by(:users_id => session[:user_id])\n end\n\n end",
"def show \n if current_user_pending?\n redirect_to edit_administrator_path(@administrator)\n end\n end",
"def show\n set_administrator\n end",
"def show\n @company = Company.find(params[:company_id])\n @role = Role.find(params[:id])\n if current_user.super_admin == true\n @supported_models = Astrotils::get_models_name.map { |model_name|\n begin\n c = Object.const_get(model_name)\n if c.respond_to?(:get_component_info) and ((c.get_component_info()[0] == :acopio and @company.system_type == false) or (c.get_component_info()[0] == :packing and @company.system_type == true))\n nil\n else\n [c.model_name.human, model_name]\n end\n rescue\n nil\n end\n }\n @supported_models.reject! {|pair| pair == nil}\n else\n @supported_models = []\n Role.where(:company_id => @company.id).map { |r|\n for rol in current_user.roles\n if rol==r\n rol.access_rights.each do |ar|\n c = Object.const_get(ar.model_name)\n if !@supported_models.include?([c.model_name.human, ar.model_name])\n @supported_models << [c.model_name.human, ar.model_name]\n end\n end\n end\n end\n }\n end\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @role }\n end\n end",
"def show\n unless @patron.user.blank?\n access_denied; return\n end\n\n #get_work; get_expression; get_manifestation; get_item\n\n case \n when @work\n @patron = @work.creators.find(params[:id])\n when @manifestation\n @patron = @manifestation.publishers.find(params[:id])\n when @item\n @patron = @item.patrons.find(params[:id])\n else\n if @version\n @patron = @patron.versions.find(@version).item if @version\n end\n end\n\n patron = @patron\n role = current_user.try(:role) || Role.default_role\n @works = Manifestation.search do\n with(:creator_ids).equal_to patron.id\n with(:required_role_id).less_than_or_equal_to role.id\n paginate :page => params[:work_list_page], :per_page => Manifestation.default_per_page\n end.results\n @expressions = Manifestation.search do\n with(:contributor_ids).equal_to patron.id\n with(:required_role_id).less_than_or_equal_to role.id\n paginate :page => params[:expression_list_page], :per_page => Manifestation.default_per_page\n end.results\n @manifestations = Manifestation.search do\n with(:publisher_ids).equal_to patron.id\n with(:required_role_id).less_than_or_equal_to role.id\n paginate :page => params[:manifestation_list_page], :per_page => Manifestation.default_per_page\n end.results\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @patron }\n format.js\n format.mobile\n end\n end",
"def show\n \n if @user.id == current_user.id || current_user.role == \"admin\"\n \n else\n \n redirect_to :root\n flash[:alert] = \"Vous n'avez pas les deroits a acceder ces informations.\"\n end\n end",
"def show\n @user = User.find(params[:id])\n @roles = @user.roles\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @user }\n end\n end",
"def show_details\n effective_sysadmin(@user, @role_limit)\n end",
"def show\n\t\tredirect_to :action => \"edit\", :id => params[:id]\n\tend",
"def show\n\t\tif current_usuario.asignacion_roles.where(esActual: true, id: Rol.where(nombre: \"Voluntario\"), proyecto: @requisito.actividad.proyecto) && current_usuario.asignacion_roles.where(esActual: true, proyecto: @requisito.actividad.proyecto).count == 1\n\t\t\traise CanCan::AccessDenied if !Requisito.accessible_by(current_ability, :show).include?(@requisito)\n\t\telse\n\t\t\tauthorize! :show, Requisito\n\t\tend\n end",
"def show\n admin_only do\n end\n end",
"def show\n checkadmin\n end",
"def show\n\t\tauthorize! :show, DetalleRestriccion\n end",
"def show\n @user = if session[:user].admin? && params.has_key?(:id)\n LinkedData::Client::Models::User.find_by_username(params[:id]).first\n else\n LinkedData::Client::Models::User.find(session[:user].id)\n end\n @all_ontologies = LinkedData::Client::Models::Ontology.all(ignore_custom_ontologies: true)\n @user_ontologies = @user.customOntology\n\n ## Copied from home controller , account action\n onts = LinkedData::Client::Models::Ontology.all;\n @admin_ontologies = onts.select {|o| o.administeredBy.include? @user.id }\n\n projects = LinkedData::Client::Models::Project.all;\n @user_projects = projects.select {|p| p.creator.include? @user.id }\n end",
"def index\n session[:current_tab] = \"Manage\"\n session[:current_sub_tab] = \"Roles\"\n @roles = Role.column_sort(params[:order], params[:direction]).all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @roles }\n end\n end",
"def show\n\tif(filters)\n\t#Revisamos si incio sesion\n\t\tif(!session[:user_id])\n\t\t\tflash[:error] = \"Acceso denegado\"\n\t\t\tredirect_to home_path\n\t\t\treturn\n\t\tend\n #Si no es admin, dueño o participante, chao\n if( (User.find(session[:user_id]).admin) || (Homework.find(params[:id]).user.id == session[:user_id]) || (Participation.exists?(:user_id => session[:user_id])) )\n @homework = Homework.find(params[:id])\n @random = rand(10000)\n @random_hash = Digest::SHA1.hexdigest(@random.to_s)\n @invitations = Participation.find_all_by_homework_id(@homework.id)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @homework }\n end\n else\n flash[:error] = \"Acceso denegado\"\n redirect_to home_path\n return\n end\n\tend\n \n \n end",
"def show\n @user = User.find(params[:id])\n @roles = Role.find(:all)\n # @roles = Role.find(:all)\n #@count=RolesUser.count(:conditions=>[\"user_id=?\",params[:id]])\n #@role=RolesUser.find(:first,:conditions=>[\"user_id=?\",params[:id]])\n #render :text=>role.role.name\n #respond_to do |format|\n # format.html # show.html.erb\n # format.xml { render :xml => @user }\n # end\n end",
"def show\n # authorize Admin\n end",
"def show\n @enable_edit = false\n @admin = false\n \n @dress = Dress.find_by_slug(params[:slug])\n @type = DressType.find_by_name params[:type]\n \n if !@dress.nil? and !@type.nil?\n @related_dresses = @dress.get_related_dresses\n set_dresses_viewed_cookies(@dress)\n @viewed_dresses = get_dresses_viewed(@dress)\n if @dress.supplier_account.nil? or @dress.dress_images.first.nil?\n respond_to do |format|\n format.html { redirect_to bazar_path }\n format.json { head :ok }\n end\n else \n @sa_type = @dress.supplier_account.supplier_account_type.name\n\n if !current_supplier.nil?\n set_supplier_layout\n @supplier = current_supplier\n @supplier_account = current_supplier.supplier_account\n @enable_edit = true\n elsif !current_user.nil? and current_user.role_id == 1 and !@dress.supplier_account.nil?\n @supplier_account = @dress.supplier_account\n @supplier = @supplier_account.supplier\n @enable_edit = true\n @admin = true\n end\n \n @soldable = (@type.name == 'vestidos-novia' ? true : false)\n \n @title_content = @dress.introduction\n \t@meta_description_content = @type.name.gsub('-', ' ').capitalize+': '+@dress.description+' - '+@dress.description\n @og_type = 'article'\n @og_image = 'http://www.tributosport.com'+@dress.dress_images.first.dress.url(:original)\n @og_description = @type.name + ': ' + @dress.description\n\n @title = params[:type]\n generate_bread_crumbs(params[:type])\n add_breadcrumb @dress.introduction.capitalize, dress_ver_path(:type => params[:type], :slug => params[:slug])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dress }\n end\n end\n else\n respond_to do |format|\n format.html { redirect_to bazar_path }\n format.json { head :ok }\n end \n end\n end",
"def show\n #@estrellas = Estrella.select(\"idobjetivo_id\", \"idusuario_id\", \"est1\", \"est2\", \"est3\").where\n @estrella = Estrella.find_by(idusuario_id: current_usuario.id, idobjetivo_id: params[:id])\n \n end",
"def show\n authorize @institute\n @admins = User.where(role:3,institute_id: @institute.id)\n @students = User.where(role:1,institute_id: @institute.id)\n end",
"def show\n if can?(:read, User)\n @user = User.find(params[:id])\n @roles = \"\"\n\n @user.roles.each do |role|\n if @roles == \"\"\n @roles += role.name\n else\n @roles += \", \" + role.name\n end\n end\n end\n\n respond_to do |format|\n format.json { render :json => @user }\n format.xml { render :xml => @user }\n format.html \n end\n\n rescue ActiveRecord::RecordNotFound\n respond_to_not_found(:json, :xml, :html)\n end",
"def index\n \n if can? :admin_reserva, Cautela\n \n @cautelas = Cautela.select(\"cautelas.id,cautelas.militar_id,cautelas.reserva_id, cautelas.data_cautela\",+\n \"cautelas.data_fim_cautela, controles.status status\")\n .joins(\"JOIN controles ON controles.reserva_id = cautelas.reserva_id \")\n .order(\"cautelas.data_cautela desc, cautelas.data_fim_cautela is null\")\n .page(params['page']).per(7)\n else\n @cautelas = Cautela.select(\"cautelas.id,cautelas.militar_id,cautelas.reserva_id, cautelas.data_cautela\",+\n \"cautelas.data_fim_cautela, controles.status status\")\n .joins(\"JOIN controles ON controles.reserva_id = cautelas.reserva_id \")\n .joins(\"JOIN usermilitars ON usermilitars.militar_id = cautelas.militar_id \")\n .where(\"usermilitars.user_id=:user_id\",{user_id:current_user.id}).all\n .order(\"cautelas.data_cautela desc, cautelas.data_fim_cautela is null\")\n .page(params['page']).per(7) \n \n end\n @reservas = Reserva.all\n @militars = Militar.all\n \n end",
"def show\n @role_permision = RolePermision.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @role_permision }\n end\n end",
"def show\n @user_role = UserRole.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n end\n end",
"def show\n\t\tselect_type\n\t\tselect_sub_type\n\t\tunless current_admin.full_access? || current_admin.partial_access?\n\t\t redirect_to \"/404.html\"# configurar pagina 403\n\t\tend\n\tend",
"def show\n authorize @primer\n end",
"def show\n @permissionviews = Permissionview.all\n @roles = Role.all\n end",
"def show\n @especificacion = Specification.where(:id => session[:specification_sel_id]).first \n @user = User.where(:username => @especificacion.user_id).first \n\n if current_user.acquisition? || current_user.import? \n @mostrar_editar = true\n elsif current_user.acquisition_analist? || current_user.import_analist? \n\t if @user.director? || @user.directorate? || @user.gsmp? || @user.acquisition? || @user.import? || @user.quality? || @user.manage? || @user.acquisition_analist? || @user.import_analist? || @user.quality_analist? || @user.manage_analist? \n\t\t@mostrar_editar = true\n else\n\t\t@mostrar_editar = false\n end\n elsif current_user.director? || current_user.directorate? || current_user.gsmp? || current_user.quality? || current_user.quality_analist? || current_user.manage? || current_user.manage_analist? || current_user.section_boss? || current_user.proy_responsible?\n\t@mostrar_editar = false\n else\n\t@mostrar_editar = true\n end\n\n\n if current_user.acquisition? || current_user.import? \n\t if @user.director? || @user.directorate? || @user.gsmp? || @user.acquisition? || @user.import? || @user.quality? || @user.manage? || @user.acquisition_analist? || @user.import_analist? || @user.quality_analist? || @user.manage_analist? \n @mostrar_eliminar = true\n else\n @mostrar_eliminar = false\n end\n elsif current_user.acquisition_analist? || current_user.import_analist? \n\t if @user.director? || @user.directorate? || @user.gsmp? || @user.acquisition? || @user.import? || @user.quality? || @user.manage? || @user.acquisition_analist? || @user.import_analist? || @user.quality_analist? || @user.manage_analist? \n @mostrar_eliminar = true\n else\n @mostrar_eliminar = false\n end\n elsif current_user.director? || current_user.directorate? || current_user.gsmp? || current_user.quality? || current_user.quality_analist? || current_user.manage? || current_user.manage_analist? || current_user.section_boss? || current_user.proy_responsible?\n\t@mostrar_eliminar = false\n else\n\t@mostrar_eliminar = true\n end\n\n\n if current_user.acquisition?\n\tif @user.acquisition? || @user.acquisition_analist?\n\t @mostrar_validar = true\n\telse\n\t @mostrar_validar = false\n\tend\n elsif current_user.import?\n\tif @user.import? || @user.import_analist?\n\t @mostrar_validar = true\n\telse\n\t @mostrar_validar = false\n\tend\n elsif current_user.director? || current_user.directorate? || current_user.gsmp? || current_user.acquisition_analist? || current_user.import_analist? || current_user.quality? || current_user.quality_analist? || current_user.manage? || current_user.manage_analist? || current_user.labassistant? || current_user.proy_responsible?\n\t@mostrar_validar = false\n else\n\t@mostrar_validar = true\n end\n\n\n if current_user.gsmp? || current_user.quality? || current_user.quality_analist? || current_user.manage? || current_user.manage_analist?\n\t@mostrar_descargar = false\n else\n\t@mostrar_descargar = true\n end\n\n @acts = Act.find(params[:id])\n \n end",
"def index\n @location[:sub_menu] = 'list_offers'\n if current_user.has_role?(:admin)\n @offers = Offer.all\n elsif current_user.has_role?(:supplier)\n @offers = Offer.where(:user_id => current_user.id)\n end\n end",
"def show\n @user = User.find(params[:id])\n can_edit_hash = Permissions.currentLoggedInOrHasMorePermissions(@current_user,@user)\n @can_edit = can_edit_hash[:has_permission]\n\n #max needs\n @can_see_pref= Permissions.is_at_least_manager(@current_user)\n\n profile_attrs = [:first_name,:last_name, :email,:phone_number]\n @first_name = @user.first_name\n @last_name = @user.last_name\n @email = @user.email\n @phone_number = @user.phone_number\n @role = @user.role\n end",
"def show\n authorize! :read, @admin_system_admin\n end",
"def show\n admin_only\n end",
"def show\n admin_only\n end",
"def show\n admin_only\n end",
"def show\n @current_admin_user = current_admin_user \n @menu = Menu.find(params[:id])\n @menu_items = @menu.menu_items\n @title = @menu.name\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => {:menu => @menu, :menu_items => @menu_items} }\n end\n end",
"def index\n @users = User.order(:name)\n if( !current_user.isadmin() )\n @users = @users.select { |u| u.isplayer(u.id) == true }\n end\n # print('heloo')\n case params[:format]\n when 'staff'\n @users = @users.select { |u| u.canrole == 'staff' } + @users.select { |u| u.canrole == 'medical' } + @users.select { |u| u.canrole == 'admin' } + @users.select { |u| u.canrole == 'staffpremiere' }\n when 'premiere'\n @users = @users.select { |u| u.canrole == 'premiere' }\n when 'staffpremiere'\n @users = @users.select { |u| u.canrole == 'satffpremiere' }\n when 'm21'\n @users = @users.select { |u| u.canrole == 'm21' }\n when 'm18'\n @users = @users.select { |u| u.canrole == 'm18' }\n when 'm16'\n @users = @users.select { |u| u.canrole == 'm16' }\n when 'm15'\n @users = @users.select { |u| u.canrole == 'm15' }\n when 'fe14'\n @users = @users.select { |u| u.canrole == 'fe14' }\n when 'fe13'\n @users = @users.select { |u| u.canrole == 'fe13' }\n when 'fe12'\n @users = @users.select { |u| u.canrole == 'fe12' }\n when 'fe11'\n @users = @users.select { |u| u.canrole == 'fe11' }\n when 'fc10'\n @users = @users.select { |u| u.canrole == 'fc10' }\n when 'fc9'\n @users = @users.select { |u| u.canrole == 'fc9' }\n when 'fc8'\n @users = @users.select { |u| u.canrole == 'fc8' }\n when 'fc7'\n @users = @users.select { |u| u.canrole == 'fc7' }\n when 'archived'\n @users = @users.select { |u| u.canrole == 'archived' }\n end\n end",
"def index\n\n @articles = Article.paginate(:page => params[:page], :per_page => 2).ultimos\n\n # Si el usuario esta logeado y es admin mostrara esta vista\n if user_signed_in? && current_user.is_editor? && !params.has_key?(:user_normal)\n \n \n render \"admin_article\"\n\n end\n end",
"def show\n authorize! :ver, @usuario_prestamo\n end",
"def show\n case\n when @work\n @agent = @work.creators.find(params[:id])\n when @manifestation\n @agent = @manifestation.publishers.find(params[:id])\n when @item\n @agent = @item.agents.find(params[:id])\n end\n\n agent = @agent\n role = current_user.try(:role) || Role.default\n @works = Manifestation.search do\n with(:creator_ids).equal_to agent.id\n with(:required_role_id).less_than_or_equal_to role.id\n paginate page: params[:work_list_page], per_page: Manifestation.default_per_page\n end.results\n @expressions = Manifestation.search do\n with(:contributor_ids).equal_to agent.id\n with(:required_role_id).less_than_or_equal_to role.id\n paginate page: params[:expression_list_page], per_page: Manifestation.default_per_page\n end.results\n @manifestations = Manifestation.search do\n with(:publisher_ids).equal_to agent.id\n with(:required_role_id).less_than_or_equal_to role.id\n paginate page: params[:manifestation_list_page], per_page: Manifestation.default_per_page\n end.results\n\n respond_to do |format|\n format.html # show.html.erb\n format.json\n format.js\n format.html.phone\n end\n end",
"def show\n if(Admin.first.nil?)\n createAdmin\n end\n\n menu=Menu.new(menu_params)\n # if the menu is saved successfully than respond with json data and status code 201\n if menu.save\n render json: Menu.all, status: 200\n else\n render json: \"422\", status: 422\n end\n # if(params[:id].include?(\"menu\"))\n # render json: \"Test\", status: 200\n # end\n #\n # menu = Menu.all.last # grabs the latest menu\n # render json: menu, status: 200\n end",
"def show\n @roles = Role.all\n end",
"def show\n self.current_user\n \n if not @current_user\n redirect_to login_path\n else\n render :action => 'edit'\n end\n end",
"def show\n @administrator_menu_nivel0_menu_nivel1 = Administrator::MenuNivel0::MenuNivel1.find(params[:id])\n @opcionMenu = Administrator::MenuNivel0.find(@administrator_menu_nivel0_menu_nivel1.administrator_menu_nivel0_id)\n end",
"def information\n @show_buttons = false\n\n @display_username = session[:display_username]\n # If the user has not logged in...\n if ( @display_username.nil? )\n redirect_to root_path\n end\n @fields_allowed_to_edit = Array.new\n @user_roles = session[:user_role]\n this_field = Array.new\n\n @id = params[:id].to_i\n school = params[:id].to_s\n\n # If the user's school = school page to view\n if session[:user_role] == 'admin'\n if school == [@id.to_s, session[:school].parameterize].join(\"-\")\n if params.has_key?(:mode)\n # Editing conditional to URL parameter (\"edit\" or \"view\" values)\n @edit = (params[:mode] == \"edit\")\n @mode = params[:mode]\n else\n # Edit by role default\n @edit = true\n @mode = \"edit\"\n end\n @show_buttons = true\n else\n @edit = false\n @show_buttons = false\n @mode = \"view\"\n end\n @approve = false\n\n if !(@fields = SettingsField.get_editing_fields( session[:user_role_id] )).nil?\n @fields.each do |this_field|\n @fields_allowed_to_edit << this_field.display_sections_id\n end\n end\n elsif session[:user_role] == 'editor'\n if params.has_key?(:mode)\n @edit = (params[:mode] == \"edit\")\n else\n @edit = true\n end\n @approve = true\n @show_buttons = true\n end\n\n @program = Program.find(@id)\n @field_string = FieldsString.find_by_program_id(@id)\n\n @fields_to_display = FieldName.select_fields_to_display( @id )\n\n @fields_to_display.each do |f|\n this_field = Array.new\n\n # Save current values for all fields. This value will be compared against the form\n # values after saving. If they are different, they get saved as \"temp\" values in each\n # table. These new values need to get approved before displaying on the webpage.\n this_field[0] = @id # field program id\n this_field[1] = f.id # field id\n this_field[2] = f.field_name # field name\n this_field[3] = f.field_value.to_s.strip # field original value\n this_field[4] = f.content_type # field or table cell\n this_field[5] = f.field_type # string, text, decimal or integer\n this_field[6] = f.display_sections_id # Display Section id (table)\n\n end\n\n # Get all of the table configurations (title, number of rows and columns)\n @data_table_configs = DataTableConfig.select_tables_by_program_id( @id )\n\n # Get how many table configurations\n table_types_amount = @data_table_configs.count\n @table_types = Array.new( table_types_amount + 1 )\n @table_names = Array.new( table_types_amount + 1 )\n @table_has_subheaders = Array.new( table_types_amount + 1 )\n\n # Create array containing headers, subheaders, categories and data for each table\n @data_table_configs.each do |table_configuration|\n\n first_data_row = 0\n this_row = table_configuration.rows\n this_column_subheader = 1\n table_name = table_configuration.table_name_id\n\n # +1 since arrays start in 0\n @table = Array.new( table_configuration.rows + 1 ) { Array.new( table_configuration.columns + 1) }\n\n data_table = DataTable.select_table_config_by_program_id( @id, table_configuration.id )\n data_table.each do |cell|\n this_cell = Array.new\n\n # Save current values for all table cells. This value will be compared against the form\n # values after saving. If they are different, they get saved as \"temp\" values in each\n # table. 
These new values need to get approved before displaying on the webpage.\n this_cell[0] = cell.id # table cell id\n this_cell[1] = cell.cell_value.to_s.strip # table cell original value\n this_cell[2] = cell.cell_value_temp.to_s.strip # table cell temporary value\n this_cell[3] = cell.program_id # program_id\n\n # Get the number of the first data row\n if (first_data_row == 0)\n first_data_row = cell.cell_row\n end\n\n # Fills each table type with its cell values per row and column\n if ( this_cell[1] == \"x\" || ( this_cell[1][0..1].include? \"x \") || ( this_cell[1][0..1].include? \"x\\n\") || ( this_cell[1][0..1].include? \"x\\r\") )\n # ascii checkmark symbol\n @table[ cell.cell_row ][ cell.cell_column ] = { :value => this_cell[1].gsub(\"x\",\"\\u2713\"), :temp_value => this_cell[2], :id => this_cell[0] }\n else\n @table[ cell.cell_row ][ cell.cell_column ] = { :value => this_cell[1], :temp_value => this_cell[2], :id => this_cell[0] }\n end\n\n end\n\n # Add categories to the table from the bottom up, if exist\n if ( table_configuration.has_categories )\n categories = Category.select_categories_by_table_config_id( table_configuration.id )\n categories.each do |category|\n if ( category.category.to_s == \"x\" || ( category.category.to_s[0..1].include? \"x \" ) || ( category.category.to_s[0..1].include? \"x\\n\") || ( category.category.to_s[0..1].include? \"x\\r\") )\n # ascii checkmark symbol\n @table[ this_row ][ 1 ] = { :value => category.category.to_s.gsub(\"x\",\"\\u2713\"), :temp_value => nil, :id => category.id }\n else\n @table[ this_row ][ 1 ] = { :value => category.category, :temp_value => nil, :id => category.id }\n end\n this_row -= 1\n end\n end\n\n # Add subheaders to the table from right to left (if the table has subheaders)\n # row 1 = header, row 2 = subheader\n # If first_data_row = 2 there are no subheaders for the table, just a header\n duplicate_subheaders = false\n if ( first_data_row == 3 )\n subheaders = SubHeader.select_subheaders_by_table_name_id( table_configuration.table_name_id )\n\n # -1 because it needs to exclude the categories subheader\n if ( table_configuration.has_categories )\n amount_of_subheaders = subheaders.count - 1\n else\n amount_of_subheaders = subheaders.count\n end\n\n if ( amount_of_subheaders > 0 )\n table_has_subheaders = true\n\n # If the amount of subheaders (minus the category subheader) mod 2 = 0 then\n # all the subheaders need to get duplicated as a comparison table\n if ( table_configuration.has_categories )\n if ( ( table_configuration.columns - 1 ) % 2 == 0 )\n duplicate_subheaders = true\n end\n else\n if ( table_configuration.columns % 2 == 0 )\n duplicate_subheaders = true\n end\n end\n\n subheaders.each do |subheader|\n @table[ 2 ][ this_column_subheader ] = { :value => subheader.subheader, :temp_value => nil, :id => subheader.id }\n\n # Subheaders duplication will happen at current column number + amount_of_subheaders\n if ( duplicate_subheaders && this_column_subheader >= 2 || duplicate_subheaders && this_column_subheader >= 1 && !table_configuration.has_categories )\n @table[ 2 ][ this_column_subheader + amount_of_subheaders ] = { :value => subheader.subheader, :temp_value => nil, :id => subheader.id }\n end\n this_column_subheader += 1\n end\n else\n table_has_subheaders = false\n end\n end\n\n headers = MainHeader.select_headers_by_table_name_id( table_configuration.table_name_id )\n amount_of_headers = headers.count\n\n # Standard table headers start at column 1,\n # if subheaders exist, then headers start at column 2\n 
if ( duplicate_subheaders )\n column_increment = amount_of_subheaders\n if ( table_configuration.has_categories )\n this_column_header = 2\n else\n this_column_header = 1\n end\n else\n column_increment = 1\n this_column_header = 1\n end\n\n headers.each do |header|\n\n # Adds a column span number between hashes for column >= 2 when categories are present\n # or for tables without categories\n if ( this_column_header >= 2 && table_configuration.has_categories ) || ( this_column_header >= 1 && !table_configuration.has_categories )\n\n #if @table[ 1 ][ this_column_header ].has_key?(\"value\")\n new_value = \"#\" + column_increment.to_s + \"#\" + header.header.to_s.strip\n #else\n #new_value = \"\"\n #end\n @table[ 1 ][ this_column_header ] = { :value => new_value, :temp_value => nil, :id => header.id }\n\n else\n\n @table[ 1 ][ this_column_header ] = { :value => header.header.to_s, :temp_value => nil, :id => header.id }\n\n end\n\n this_column_header += column_increment\n\n end\n\n # Get table name\n table_title = TableName.find( table_configuration.table_name_id )\n\n @table_types[ table_configuration.table_name_id ] = @table\n @table_names[ table_configuration.table_name_id ] = table_title.display_table_name\n @table_has_subheaders[ table_configuration.table_name_id ] = table_has_subheaders\n\n end\n\n end",
"def show\n # Can only view page if you are an admin or the logged in user\n if current_user && (current_user.admin || current_user.id.to_f == params[:id].to_f)\n else\n redirect_to current_user\n end\n end",
"def show\n if current_user.role == 'admin'\n redirect_to '/'\n end\n end",
"def index\n\n if current_user.roleid ==1\n @projekts = Projekt.all\n if params[:id].nil? \n @projekt = Projekt.first\n else\n @projekt = Projekt.find(:all, :conditions => [ \"id = ?\", params[:id]]).first \n end\n else\n @projekts = Projekt.find(:all, :conditions=>[\"projektleiter=?\", current_user.email])\n \n if params[:id].nil? \n @projekt = Projekt.find(:all, :conditions => [ \"projektleiter = ?\", current_user.email]).first \n else\n @projekt = Projekt.find(:all, :conditions => [ \"id = ?\", params[:id]]).first \n end\n end\n\n \n \n @tree = params[:view]\n\n\n @aufgaben = Aufgaben.all\n @arbeitspakets = Arbeitspaket.all\n end",
"def show\n if current_user.role.nil?\n flash.notice = \"You are not authorized for that operation.\"\n redirect_to home_index_path\n elsif current_user.role.include?(\"administrator\") || current_user.role.include?(\"intern\")\n render :show\n elsif current_user.role.include?(\"volunteer\")\n if current_user.customers.include? @order.customer\n render :show\n else\n flash.notice = \"You are not authorized for that operation.\"\n redirect_to home_index_path\n end\n else\n flash.notice = \"You are not authorized for that operation.\"\n redirect_to home_index_path\n end\n end",
"def show\n #@outlet_name = SELECT \"food_courts\".foodCourt FROM \"food_courts\" WHERE \"food_courts\".\"id\" = ? LIMIT 1 [[\"id\", params[:id]];\n #puts(\"name \"+@outlet_name)\n @idMenu = params[:id]\n @menu_list = MenuList.where(outlet_id: @idMenu)\n #@menu_list = MenuList.all\n # retrieve movie ID from URI route\n #@menu_list = MenuList.find(id)\n #@menu_list = MenuList.where(outlet_id: params[:id])\n #redirect_to menu_lists_path\n end",
"def show\n isadmin\n end",
"def admin_show\n @phase = Phase.eager_load(:sections).find_by('phases.id = ?', params[:id])\n authorize @phase\n\n @current = Template.current(@phase.template.dmptemplate_id)\n @edit = (@phase.template.org == current_user.org) && (@phase.template == @current)\n #@edit = params[:edit] == \"true\" ? true : false\n\n #verify if there are any sections if not create one\n @sections = @phase.sections\n if !@sections.any?() || @sections.count == 0\n @section = @phase.sections.build\n @section.phase = @phase\n @section.title = ''\n @section.number = 1\n @section.published = true\n @section.modifiable = true\n @section.save\n @new_sec = true\n end\n #verify if section_id has been passed, if so then open that section\n if params.has_key?(:section_id)\n @open = true\n @section_id = params[:section_id].to_i\n end\n if params.has_key?(:question_id)\n @question_id = params[:question_id].to_i\n end\n if @phase.template.customization_of.present?\n @original_org = Template.where(dmptemplate_id: @phase.template.customization_of).first.org\n else\n @original_org = @phase.template.org\n end\n end",
"def show\n @role = Role.includes(:personal_roles => :person).find_by_slug!(params[:id])\n respond_to do |format|\n format.html\n format.json { render :json => @role }\n end\n end",
"def show_admin\n screen_name(\"Inicial-Admin\")\n\n distribute_ots\n\n respond_to do |format|\n format.html { render action: \"show_admin\" }\n format.json { head :ok }\n end\n end",
"def admin_index\n authorize User\n @users = current_user.org.users.includes(:roles)\n end",
"def show\n @role = @client.roles.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @role }\n end\n end",
"def index\n authorize Admin\n @admins = User.find_by_sql([\"select * from users where role = ?\",2])\n end",
"def show\n unless current_user && current_user.id == @user.id || current_user && current_user.role == 'admin'\n redirect_to root_path\n end\n end",
"def index\n @position_exist = Login.current_login.staff.position\n if @position_exist \n #@lesson_plans = LessonPlan.find(:all, :order => \"lecturer ASC, lecture_date DESC\")\n @lesson_plans = LessonPlan.search(params[:search])\n end\n current_roles = Role.find(:all, :joins=>:logins, :conditions=>['logins.id=?', Login.current_login.id]).map(&:authname)\n @is_admin=true if current_roles.include?(\"administration\") || current_roles.include?(\"lesson_plans_module_admin\")|| current_roles.include?(\"lesson_plans_module_viewer\")|| current_roles.include?(\"lesson_plans_module_user\")\n respond_to do |format|\n if @position_exist\n format.html # index.html.erb\n format.xml { render :xml => @lesson_plans }\n else\n format.html { redirect_to \"/home\", :notice =>t('position_required')+t('lesson_plan.title')}\n format.xml\n end\n end\n end",
"def index\n #@leaveforstaffs = Leaveforstaff.with_permissions_to(:index).find(:all)\n @filters = Leaveforstaff::FILTERS\n if params[:show] && @filters.collect{|f| f[:scope]}.include?(params[:show])\n @leaveforstaffs = Leaveforstaff.with_permissions_to(:index, :order => \"staff_id ASC, leavestartdate ASC\").send(params[:show])\n else\n @leaveforstaffs = Leaveforstaff.with_permissions_to(:index, :order => \"staff_id ASC, leavestartdate ASC\").relevant\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @leaveforstaffs }\n end\n end",
"def show\n authorize! :show, UsuarioCurso\n @badges = BadgeAluno.where(usuario_curso_id: @usuario_curso.id) \n @artefatos = ArtefatoAluno.where(usuario_curso_id: @usuario_curso.id) \n authorize! :show, UsuarioCurso\n end",
"def show\n registration = Registration.find_by(event_id: params[:id], user_id: current_user.id)\n @event = Event.find_by(id: params[:id])\n if registration.role == \"admin\"\n render :show\n else\n render :show_guest\n end\n\n end",
"def show\n @permissions = @role.permissions\n end",
"def show\n authorize @admin\n end",
"def index\n params[:admin_catalogo] = {} unless params[:admin_catalogo]\n params[:admin_catalogo][:nivel1] = 11\n @admin_catalogo = Admin::Catalogo.new(admin_catalogo_index)\n @admin_catalogos = @admin_catalogo.query_index\n\n render action: 'index', locals: { message: @admin_catalogos.any? ? '': 'Tu búsqueda no dio resultados con esos filtros' }\n end",
"def show\n @page_role = PageRole.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @page_role }\n end\n end",
"def show\n\t\tshow_\n\t\trespond_to do |format|\n\t\t\tformat.html # show.html.erb\n\t\t\tformat.xml { render :xml => @role }\n\t\tend\n\tend",
"def show\n authorize :resquest_type, :show?\n end",
"def show\n \n is_user_signed_in(\"part/show\")\n \n if params[:name] \n if params[:id] ## item_id\n @participant = Participant.joins(:items).where(\"items.item_id = :iid\", {iid: params[:id]}) \n render :partial => 'participants/nae_readonly', participant: @participant, item_type: 'borrow' \n end \n end \n \n \n if params[:id] \n @lookup = params[:id] \n else \n @lookup = id \n end \n @editable = 1 \n logger.debug \"IN PART/SHOW\"\n logger.debug @lookup\n if @lookup \n \n case @lookup.to_s \n when 'user_nae' \n render 'user_nae'\n when 'community_members' \n @participant = Participant.where(\"participant_id = :pid\", {pid: getParticipantID})\n @members = Participant.where(\"community_id = :cid\", {cid: getCommunityID})\n render 'participants/community_members', \n :locals => {participant: @participant, members: @members, editable: 0, num_rows: 0}\n when 'user_agreement' \n @participant = Participant.find_by_participant_id( getParticipantID) \n render :template => 'participants/user_agreement', participant: @participant, editable: @editable \n when 'main_user_detail' \n logger.debug \"FINISHED ACTIVATION S?B HERE\"\n session[:current_div] = \"nae\" \n is_user_signed_in(\"main user detail\")\n @user = User.find(getUserID)\n @participant = Participant.find_by(user_id: getUserID)\n logger.debug \n @contact_preference = ContactPreference.where(\"participant_id = :pid\", {pid: getParticipantID}).first\n if @contact_preference.nil?\n @contact_preference = ContactPreference.new\n end\n if getCommunityID\n @community = Community.find_by(user_id: getUserID)\n end\n \n if getUserType == 'lend' || getUserType == 'both'\n @lender_transfer = LenderTransfer.where(\"participant_id = :pid\", {pid: getParticipantID}).first\n \n if @lender_transfer.nil? \n @lender_transfer = LenderTransfer.new\n end\n \n @lender_item_condition = LenderItemCondition.where(\"participant_id = :pid\", {pid: getParticipantID}).first\n \n if @lender_item_condition.nil?\n @lender_item_condition = LenderItemCondition.new\n end\n end\n render 'participants/main_user_detail',\n :locals => {lender_transfer: @lender_transfer, lender_item_condition: @lender_item_condition, \n participant: @participant, community: @community, contact_preference: @contact_preference, user: @user, editable: @editable}\n end \n \n else \n\n unless getParticipantID.nil? \n \t\t @participant = Participant.find_by_participant_id( getParticipantID) \n else \n \t\t @participant = Participant.new \n end \n \t\n \t render 'participants/user_nae', participant: @participant, editable: @editable \n\t\n end \n raise Forbidden, \"You are not allowed to access participant information.\"\n end",
"def show\n debug_log(\"[d] Users_Ctrl: ac: show\") # log\n @show_option = \"user\"\n @current_reptiles = Reptile.where(user_id: current_user)\n \n # ショップが持つ、新入荷レプタイル情報を取得\n if @current_reptiles\n @created_at_desc = @current_reptiles.all.order(created_at: \"DESC\") # 降順\n debug_log(\"[d] Users_Ctrl: ac: show @created_at_desc.count=#{@created_at_desc.count}\") # log\n \n if @created_at_desc.count <= 5\n @new_arrivals = @created_at_desc.first(@created_at_desc.count)\n else\n @new_arrivals = @created_at_desc.first(5)\n end\n debug_log(\"[d] Users_Ctrl: ac: show @new_arrivals=#{@new_arrivals.inspect}\") # log\n debug_log(\"[d] Users_Ctrl: ac: show @new_arrivals.count=#{@new_arrivals.count}\") # log\n end\n \n # ショップが持つ、タイプ毎のReptile情報を取得\n if params[:current_reptile_type].present?\n @show_option = params[:current_reptile_type]\n @current_reptile_type = @current_reptiles.where(type1: params[:current_reptile_type])\n debug_log(\"[d] Users_Ctrl: ac: show @current_reptile_type=#{@current_reptile_type}\") # log\n unless @current_reptile_type.present?\n flash.now[:warning] = \"「#{params[:current_reptile_type]}」の登録は現在ありません\"\n end\n end\n \n # ショップ内で、選択したレプタイルページへ遷移\n if params[:current_select].present?\n @show_option = params[:current_select]\n @current_select_reptile = @current_reptiles.find(params[:current_select])\n debug_log(\"[d] Users_Ctrl: ac: show @current_select_reptile=#{@current_select_reptile}\") # log\n end\n \n debug_log(\"[d] Users_Ctrl: ac: show @show_option=#{@show_option}\") # log\n end",
"def show\n @role = Role.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @role }\n end\n end",
"def show\n if !current_user.isAdmin? and current_user.id != @user.id\n redirect_to user_path(current_user.id)\n end\n end",
"def show\n authorize_action_for(@revise)\n end"
] | [
"0.68534184",
"0.6715386",
"0.6648249",
"0.66386",
"0.6609666",
"0.65830773",
"0.65453887",
"0.6530323",
"0.6523014",
"0.65209013",
"0.65181243",
"0.65065044",
"0.64896894",
"0.64754343",
"0.6455182",
"0.6443072",
"0.64202285",
"0.6364056",
"0.6364056",
"0.6362212",
"0.63596404",
"0.6344545",
"0.63441676",
"0.6305404",
"0.6283836",
"0.62549263",
"0.62296516",
"0.6225891",
"0.6195879",
"0.61889",
"0.6176736",
"0.6175502",
"0.61729765",
"0.6159354",
"0.61486816",
"0.61482036",
"0.6143343",
"0.6142217",
"0.61256224",
"0.61247",
"0.61198634",
"0.61180043",
"0.6116268",
"0.6115071",
"0.6109119",
"0.61075467",
"0.6105001",
"0.60958993",
"0.60942245",
"0.6085655",
"0.60843277",
"0.6077902",
"0.6076558",
"0.6075785",
"0.6063438",
"0.6062842",
"0.6059668",
"0.60526943",
"0.6052141",
"0.6048588",
"0.6048588",
"0.6048588",
"0.60387397",
"0.6036244",
"0.60346365",
"0.6034459",
"0.60329485",
"0.6030798",
"0.60304606",
"0.6025364",
"0.6015586",
"0.6013193",
"0.60061175",
"0.6004933",
"0.60021895",
"0.59998566",
"0.59997976",
"0.5998584",
"0.5994904",
"0.59791857",
"0.5976133",
"0.5969708",
"0.5968379",
"0.59673846",
"0.596572",
"0.5961111",
"0.5957608",
"0.59559184",
"0.59546036",
"0.59503484",
"0.5945932",
"0.5945883",
"0.59431046",
"0.59347695",
"0.59339374",
"0.5932413",
"0.5926472",
"0.5921235",
"0.59209466",
"0.5920311"
] | 0.6570462 | 6 |
NEW : 1. getting params DONE 2. filter params 3. create ROLES 4. create MENUSROLE 5. redirect 6. WAITING FOR EDIT | def new
paramsr # presumably a controller helper that sets @hotel_id and @package_id used below
# @arr = []
# @roles=user_hotels.where(id: @hotel_id).first.roles.all
@role = Role.new
# Menus belonging to the hotel's package; binding @package_id avoids interpolating it into the SQL string
@menu = Menu.find_by_sql(["SELECT menus.name, menus.id, menus_packages.menu_id FROM menus_packages INNER JOIN menus ON menus.id = menus_packages.menu_id WHERE package_id = ?", @package_id])
@menu_ids = []
# @package_id = Hotel.where(id: @hotel_id).first.package_id
# @menu_id=MenusPackage.where(package_id: 2) #@package_id)
# @menu_id.each do |m|
# @arr=@arr.push(m.menu_id)
# end
# @menus=Menu.find_by_sql("SELECT menus.name, menus.id, menus_packages.menu_id FROM menus_packages INNER JOIN menus ON menus.id=menus_packages.menu_id WHERE package_id = #{@package_id}")
# @sql = "SELECT menus.name, menus_packages.menu_id FROM menus_packages INNER JOIN menus ON menus.id=menus_packages.menu_id WHERE package_id = #{@package_id}"
# @menu_id = ActiveRecord::Base.connection.execute(@sql).to_s
# @menu_id=MenusPackage.find_by_sql("SELECT menus.name, menus_packages.menu_id FROM menus_packages INNER JOIN menus ON menus.id=menus_packages.menu_id WHERE package_id = #{@package_id}")
# @menus_package = MenusPackage.where(package_id: @package_id)
# @menu_name
end | {
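A hedged sketch of the create action implied by the remaining steps in the query (filter params, create the Role, link it to the selected menus via MenusRole, redirect). The strong-parameter call, the MenusRole join model, the :menu_ids checkbox array, and roles_path are assumptions for illustration, not confirmed by the source.

def create
  # Assumed strong parameters; :menu_ids would come from menu checkboxes rendered by the new view
  permitted = params.require(:role).permit(:name, menu_ids: [])

  @role = Role.new(permitted.except(:menu_ids))
  if @role.save
    # One MenusRole join row per selected menu (assumed join model between menus and roles)
    Array(permitted[:menu_ids]).reject(&:blank?).each do |menu_id|
      MenusRole.create(role_id: @role.id, menu_id: menu_id)
    end
    redirect_to roles_path, notice: 'Role was successfully created.'
  else
    render :new
  end
end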
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @user = User.where(:name => params[:user][:name]).first\n if @user.nil?\n @user = User.new\n flash[:notice] = '用户不存在!'\n respond_to do |format|\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n return\n end\n \n if @user.admin\n @user = User.new\n flash[:notice] = '用户已经是管理员!'\n respond_to do |format|\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n return\n end\n \n select_roles = params[:user_roles]\n select_roles.each do |role_id|\n @user.user_roles.create(:role_id => role_id)\n end unless select_roles.nil?\n \n @user.admin = true\n \n respond_to do |format|\n if @user.save\n @user.roles.joins(:permissions).select('permissions.controller_name,permissions.action_name,permissions.rest,roles.app_id').each do |record|\n UserVisit.create :controller_name => record.controller_name, :action_name => record.action_name, :rest => record.rest, :app_id => record.app_id, :user_id => @user.id\n end\n format.html { redirect_to admin_role_path(@user), notice: '权限新建成功.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def crear\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_crear = false\n @security_role_type = Security::RoleType.find_by(name: \"Crear\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type| \n if @security_role_type == role_type.security_role_type.name\n @permiso_crear = true\n break\n end\n end\n elsif params[:controller] == \"security/role_type_menus\"\n params[:controller] = \"security/roles\"\n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_crear = true\n break\n end\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_crear = true\n end\n if params[:action] == \"new\" && @permiso_crear == false\n redirect_to root_path\n end\n return @permiso_crear\n end\n end\n end",
"def create\n @company = Company.find(params[:company_id])\n @role = Role.find(params[:role_id])\n access_right_hash = params[:access_right]\n \n if current_user.super_admin\n is_ok = true\n else\n current_user.roles.each { |r|\n r.access_rights.each { |ar|\n puts access_right_hash['model_name']\n if ar.model_name == access_right_hash['model_name'] && ar.action == access_right_hash['action']\n is_ok = true\n end\n }\n }\n end\n \n respond_to do |format|\n if is_ok\n @access_right = @role.access_rights.create(params[:access_right])\n @access_right.company_id = current_user.company_id\n @access_right.save\n format.html { redirect_to company_role_path(@company, @role) }\n else\n format.html { redirect_to company_role_path(@company, @role), notice: 'Usted no puede conceder este permiso.' }\n end\n end\n end",
"def create\n\t\tauthorize! :create, AsignacionRol\n @asignacion_rol = AsignacionRol.new(asignacion_rol_params)\n @asignacion_rol.esActual = true\n @asignacion_rol.active = true\n @proyecto = @asignacion_rol.proyecto\n @roles = []\n @rols = Rol.where(tipo_rol: TipoRol.where(nombre:'Proyecto'))\n coor_sist = Rol.where(\"nombre = ?\", 'Coordinador Sistema').first\n asig_func = AsignacionFuncion.where(\"usuario_id = ? AND rol_id = ?\", current_usuario, coor_sist).first\n @rols.each do |rol|\n if rol.nombre != 'Coordinador' || asig_func != nil then\n @roles << rol\n end\n end\n if unica(@asignacion_rol.usuario_id,@asignacion_rol.proyecto_id, @asignacion_rol.rol_id) == true\n\t respond_to do |format|\n\t if @asignacion_rol.save\n sesion= Sesion.find_by(usuario_id: current_usuario.id, fechaFin: nil)\n Transaccion.create!(\n \t\t descripcion: \"Creación asociación rol #{@asignacion_rol.rol.nombre} al usuario #{@asignacion_rol.usuario.nombreUsuario} del proyecto #{@asignacion_rol.proyecto.nombre} : actual = #{ t @asignacion_rol.esActual.to_s}\",\n \t\t sesion_id: sesion.id ,\n \t\t proyecto_id: @asignacion_rol.proyecto.id)\n\t\tformat.html {redirect_to :controller => 'asignacion_roles', :action => 'index',:proyecto_id => @asignacion_rol.proyecto.id } \n\t\tformat.json { render :show, status: :created, location: @asignacion_rol }\n\t else\n params[:usuario_id] = @asignacion_rol.usuario_id\n \t\tformat.html { render :new }\n \t\tformat.json { render json: @asignacion_rol.errors, status: :unprocessable_entity }\n\t end\n\t end\n else\n\t respond_to do |format|\n\t\t format.html { redirect_to :controller => 'asignacion_roles', :action => 'index', :proyecto_id => @asignacion_rol.proyecto.id\n\t\t flash[:danger] = 'El usuario ya se encuentra asignado' } \n\t end\n end\n end",
"def mod_all\n role = Role.find(params[:id])\n user = User.find(params[:role][:user_id])\n user.roles << role\n\n flash[:notice] = \"La modificacion ha sido realizada correctamente.\"\n\n redirect_to :back\n end",
"def select_role\n if params[:user] && params[:user][:role_id]\n stage_one\n render :new\n else\n redirect_to \"/welcome/advertiser\"\n end\n end",
"def create\n @user = User.new(user_params)\n\n if roles = params[:user][:roles]\n roles.map { |r| r.downcase }.each do |role|\n unless role.empty?\n @user.roles << Role.new(type: role)\n\n if role == \"admin\"\n respond_to do |format|\n if @user.save\n format.html { redirect_to (flash[:redirect] || :attendees), notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end\n\n if role == \"staff\"\n redirect_to get_staff_list_path\n end\n\n end\n end\n end\n end",
"def create\n @roles = Role.paginate :page => params[:page],\n :per_page => 15,\n :order => sort_order('name')\n @role= Role.new(params[:role])\n\n respond_to do |format|\n if @role.save\n format.html { redirect_to(roles_url, :notice => 'New User role successfully added.') }\n format.xml { render :xml => @role, :status => :created, :location => @role }\n else\n format.html { render :action => \"index\" }\n format.xml { render :xml => @role.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:id])\n @user.transaction do\n @user.user_roles.destroy_all\n select_roles = params[:user_roles]\n select_roles.each do |role_id|\n @user.user_roles.create(:role_id => role_id)\n end unless select_roles.nil?\n respond_to do |format|\n if @user.save!\n @user.user_visits.destroy_all\n @user.roles.joins(:permissions).select('permissions.controller_name,permissions.action_name,permissions.rest,roles.app_id').each do |record|\n UserVisit.create :controller_name => record.controller_name, :action_name => record.action_name, :rest => record.rest, :app_id => record.app_id, :user_id => @user.id\n end\n format.html { redirect_to admin_role_url(@user), notice: '权限修改成功.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def edit_roles\n if (@user = find_user(params[:id]))\n begin\n User.transaction(@user) do\n \n roles = params[:user][:roles].collect { |role_id| Role.find(role_id) }\n # add any new roles & remove any missing roles\n roles.each { |role| @user.roles << role if !@user.roles.include?(role) }\n @user.roles.each { |role| @user.roles.delete(role) if !roles.include?(role) }\n\n @user.save\n flash[:notice] = \"Roles updated for user '#{@user.login}'.\"\n end\n rescue\n flash[:warning] = 'Roles could not be edited at this time. Please retry.'\n ensure\n redirect_to :back\n end\n else\n redirect_back_or_default :action => 'list'\n end\n end",
"def update\n authorize(current_user)\n role = params[:user][:role_ids]\n roleModel =Role.find( role)\n if @user.setRole roleModel.name\n @user.save\n redirect_to users_path, :notice => \"Rolle geändert\"\n else\n redirect_to users_path, :notice => \"Rolle nicht geändert\"\n end\n end",
"def create\n @manage_admin = Manage::Admin.new(manage_admin_params)\n @admin_roles= @manage_admin.roles\n respond_to do |format|\n if @manage_admin.save\n\n # 保存角色信息\n roles_id=params[:roles]\n @manage_admin.roles_in_id=roles_id\n\n format.html { redirect_to @manage_admin, notice: \"成功创建管理员#{@manage_admin.nickname}.\" }\n\n format.json { render :show, status: :created, location: @manage_admin }\n else\n format.html { render :new }\n format.json { render json: @manage_admin.errors, status: :unprocessable_entity }\n end\n end\n end",
"def modificar\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_modificar = false\n @security_role_type = Security::RoleType.find_by(name: \"Modificar\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_modificar = true\n break\n end\n end\n elsif params[:controller] == \"security/role_type_menus\"\n params[:controller] = \"security/roles\"\n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_modificar = true\n break\n end\n end\n end\n end\n if params[:controller] == \"service/services\" && current_user.employee.provider_provider_id.nil?\n @permiso_modificar = true\n end\n end\n if current_user.username == \"aadmin\" \n @permiso_modificar = true\n end\n\n if params[:action] == \"edit\" && @permiso_modificar == false\n redirect_to root_path\n end\n return @permiso_modificar\n end\n end\n end",
"def create\n @screen = session.active_screen\n @form_content_reuse = params[:form_content_reuse]\n\n user_ids = params[:role][:user_ids]\n params[:role].delete(:user_ids) unless params[:role][:user_ids].nil?\n @role = Role.new(params[:role]) \n\n if @role.save\n @role.users = User.find(user_ids) unless user_ids.nil?\n @role.users.each{|u| u.update_attributes( :updated_at => Time.now ) }\n end\n \n respond_to do |format|\n format.html # create.html.erb\n format.xml { render :xml => @role }\n end\n end",
"def index\n if current_user.rol == 1\n @roles = Role.order(:id)\n @role = Role.new\n else\n @mensaje = \"Seccion solo para administrador\"\n end\n end",
"def create\n error = ''\n \n if params[:roles]\n params[:roles].each do |role|\n @role = Role.new(role) unless role[:title].blank?\n error << model_errors(@role) unless @role.save\n end\n else\n @role = Role.new(params[:roles])\n error << model_errors(@role) unless @role.save\n end\n \n respond_to do |format|\n format.html do\n if error.blank?\n flash[:notice] = \"#{params[:roles].nil? ? 'Role has' : 'Roles have'} been created.\"\n redirect_back_or_default roles_path\n else\n flash[:error] = 'Oops, something went wrong.'\n @role.nil? ? render(:action => 'edit') : redirect_back_or_default(roles_path)\n end\n end\n \n format.js do\n if error.blank?\n flash.now[:notice] = \"#{params[:roles].nil? ? 'Role has' : 'Roles have'} been created.\"\n get_models\n render :action => 'index', :layout => false\n else\n flash.now[:error] = 'Oops, something went wrong.'\n render :action => 'edit', :layout => false\n end\n end\n end\n end",
"def create\n \n if request.get?\n @role = Role.new\n else\n @role = Role.new(params[:role])\n\n # assign parent role\n if not params[:role][:parent].to_s.empty?\n @role.parent = Role.find(params[:role][:parent].to_i)\n end\n\n if @role.save\n # set the roles's static permissions to the static permission from the parameters \n params[:role][:static_permissions] = [] if params[:role][:static_permissions].nil?\n @role.static_permissions = params[:role][:static_permissions].collect { |i| StaticPermission.find(i) }\n\n # the above should be successful if we reach here; otherwise we \n # have an exception and reach the rescue block below\n flash[:success] = 'Role has been created successfully.'\n redirect_to :action => 'show', :id => @role.id\n else\n render :action => 'create'\n end\n end\n \n rescue ActiveRecord::RecordNotFound\n flash[:error] = 'You sent an invalid request.'\n redirect_to :action => 'list'\n end",
"def new\n @employee = Employee.new\n @lawfirm_admin = User.new\n @roles = Role.scoped_by_company_id(current_user.company_id)\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @lawfirm_admin }\n authorize!(:new,current_user) unless current_user.role?:lawfirm_admin\n end\n end",
"def create\n if !grant_access(\"edit_roles\", current_user)\n head(403)\n end\n @role = Role.new(role_params)\n @role.user_id = current_user.id\n respond_to do |format|\n if @role.save\n format.html { redirect_to @role, notice: 'Role was successfully created.' }\n format.json { render :show, status: :created, location: @role }\n else\n format.html { render :new }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n puts \"===#{params.dup}\"\n options = params.dup\n\toptions[:organization_user][:status] = OrganizationUser::INITIAL\n\tuser = User.find_by_id(options[:organization_user][:user_id])\n\tif user.nil?\n\t\tflash[:error] = \"You must pick a user to become an admin.\"\n\t\tredirect_to \"/organization_users/new\"\n puts \"after redirect\"\n\t\treturn false\n\tend \n @organization_user = OrganizationUser.new(options[:organization_user]) \n respond_to do |format|\n if @organization_user.save\n User.find(options[:organization_user][:user_id]).roles << Role.find_by_name('OrganizationUser')\n flash[:notice] = 'Organization User was successfully created.'\n format.html { redirect_to \"/organization_users\" }\n format.xml { render :xml => @organization_user, :status => :created, :location => @organization_user }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @organization_user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def add\n \n @role = Role.new(:active => 1)\n @users = User.all\n \n render(:action => 'edit')\n \n end",
"def create\n @role = Role.new(role_params)\n @role.create_user_id = session[:user_id]\n #權限\n @atext = ''\n\n\n\n\n if @role.save\n\n params['point'].each do |po|\n @atext += po[0].to_s + ','\n end\n @role.text = @atext\n @role.save\n\n flash[:notice] = \"角色-新增成功!\"\n redirect_to action: \"index\"\n else\n @aces = Access.all\n render action: 'new'\n end\n\n # respond_to do |format|\n # if @role.save\n # format.html { redirect_to @role, notice: 'Role was successfully created.' }\n # format.json { render action: 'show', status: :created, location: @role }\n # else\n # format.html { render action: 'new' }\n # format.json { render json: @role.errors, status: :unprocessable_entity }\n # end\n # end\n end",
"def create\n logger.debug(\"Role create\")\n if params[:from] == 'organisation'\n @organisation = Organisation.find(params[:id])\n @role = Role.new(params[:role])\n @role.organisation_id = @organisation.organisation_id\n @from='organisation'\n else\n @person = Person.find(params[:id])\n @role = Role.new(params[:role])\n @role.person_id = @person.person_id\n @from='person'\n end\n \n @role.updated_by = get_user.login_id\n \n\t# if the editing user does have permission to publish CRM, set the role status to 'Pending'\n\t@role.status_id = Status::PENDING.status_id if !PrivilegesHelper.has_permission?(@login, 'CAN_PUBLISH_CRM')\n\t \n if @role.create_self(@login)\n \n\t # Role.crm_privileges_check(@login, @role)\n\t \n\t contact_name = ''\n # name of the contact\n if !@role.person.blank?\n contact_name = ' for ' + @role.person.full_name\n else\n contact_name = ' for ' + @role.organisation.organisation_list_name(true) unless @role.organisation.blank?\n end\n \n flash[:notice] = 'Role' + contact_name + ' was successfully created.'\n redirect_to :controller => 'role_contactinfos', :action => 'edit', :id => @role\n else\n render :action => 'new', :locals => {@from => params[:from]}\n end\n end",
"def create\n if params and params[:user]\n @role = params[:user][:role] \n \n user_session[\"role\"] = @role if user_session\n end\n super\n end",
"def create\n submenu_item 'role_new'\n load_permissions\n ids=params[:permissions].select{|k,v| v=='1'}.map { |k,v| k.to_i } unless params[:permissions].nil?\n if ids.length > 0\n permissions=Permission.find(:all, :conditions => [\"id in (#{ids.join(',')})\"])\n params[:role][:permissions] = permissions\n @role = Role.new(params[:role])\n if @role.save\n flash[:notice] = \"创建角色成功\"\n redirect_to :action => 'index'\n else\n flash[:error] = \"创建角色失败\"\n render :action => 'new'\n end\n else\n flash[:error] = \"角色名或权限不能为空\"\n redirect_to :action => 'new'\n end\n\n end",
"def create\n # # Why RAW SQL? Because the ORM version was SUPER slow! More than 30 secs to exeucte. Hence, this.\n # query = \"SELECT U.* FROM `users` U \n # INNER JOIN `user_role_maps` RM ON RM.user_id=U.id\n # INNER JOIN `roles` R ON R.id=RM.role_id\n # WHERE R.role='Volunteer' AND U.`phone_no` = '\"+user_params[:phone_no]+\"' AND U.is_deleted='0'\"\n # user = User.find_by_sql query\n\n # User login possibilites...\n # Phone number not found.\n # Found - but deleted(is_deleted = 1)\n # Role is not 'Volunteer'\n # Role is not assigned. At all.\n user = User.find_by_phone_no user_params[:phone_no]\n\n # Phone number not found.\n unless user\n #raise ActionController::RoutingError.new('Not Found')\n @data = {\n :id => 0,\n :is_fc => 0,\n :message => \"Couldn't find any users with that phone number(\" + user_params[:phone_no] + \")\",\n }\n else\n\n # Found - but deleted(is_deleted = 1)\n if(user[:is_deleted] == '1')\n @data = {\n :id => 0,\n :is_fc => 0,\n :message => \"User '\" + user[:first_name] + \" \" + user[:last_name] + \"' has been deleted from the system.\"\n }\n else\n roles_query = \"SELECT R.* FROM `user_role_maps` RM \n INNER JOIN `roles` R ON R.id=RM.role_id\n WHERE RM.user_id='\" + user[:id].to_s + \"'\"\n roles = Role.find_by_sql roles_query\n\n is_fc = 0\n vol = 0\n\n puts roles.inspect\n\n roles.each { |role|\n # Role is not 'Volunteer'\n # Role is not assigned. At all.\n if role[:role] == \"Volunteer\"\n vol = 1\n elsif role[:role] == \"Finance Fellow\"\n is_fc = 1\n end\n }\n\n\n if vol == 0\n @data = {\n :id => 0,\n :is_fc => 0,\n :message => \"User '\" + user[:first_name] + \" \" + user[:last_name] + \"' is not assigned a POC. Please let your CFR Fellow know.\"\n }\n else\n @data = {\n :id => user[:id],\n :is_fc => is_fc,\n :message => \"Login successful.\"\n }\n end\n end\n end\n end",
"def authorized_for_roles(*args)\n # From: http://stackoverflow.com/a/6076035/999973\n # args.any? { |role_name| ROLES.include? role_name }\n # ROLES = %w[admin moderator editor author banned] in user model\n # calling it:\n # before_filter(only: [:edit, :update, :destroy]) {|c| c.authorized_for_roles \"admin\", \"editor\"}\n \n # args.any? { |role_name| current_user.role == role_name }\n \n\n unless signed_in?\n self.current_user = User.create( name: \"Guest\" )\n redirect_to(root_path) unless args.any? { |role_name| current_user.role == role_name }\n self.current_user = nil\n return\n end\n\n redirect_to(root_path) unless args.any? { |role_name| current_user.role == role_name }\n end",
"def update\n load_user\n build_user\n assign_roles\n save_user or render :edit\n end",
"def create\n @role = Role.new(roles_params)\n @roles = Role.all\n #Verificacion de que los campos estén llenos\n if params[:role][:descrip_rol] == \"\"\n @titulo = \"Creacion de rol\"\n @mensaje = \"Debe llenar todos los campos\"\n @tipo = \"warning\"\n @icono = \"icon fa fa-warning\"\n else\n #Verificacion de la repeticion del nombre\n if !RepeticionRolCreate(@roles, params[:role][:descrip_rol])\n @titulo = \"Creacion de rol\"\n @mensaje = \"Ya existe un rol de usuario con ese nombre\"\n @tipo = \"warning\"\n @icono = \"icon fa fa-warning\"\n else\n respond_to do |format|\n if @role.save\n format.js\n format.html {redirect_to @role, notice: \"Rol de usuario creado correctamente\"}\n format.json {render :show, status: :created, location: @role}\n @titulo = \"Creacion de rol\"\n @mensaje = \"Se a creado el rol de usuario correctamente\"\n @tipo = \"success\"\n @icono = \"icon fa fa-check\"\n else\n format.js\n format.html {render :new}\n format.json {render json: @role.errors, status: :unprocessable_entity}\n @titulo = \"Creacion de rol\"\n @mensaje = \"Ha ocurrido un error\"\n @tipo = \"danger\"\n @icono = \"icon fa fa-ban\"\n end\n end\n end\n end\n end",
"def create\n respond_to do |format|\n if @role.save\n format.html { redirect_to user_roles_path(@user), notice: I18n.t('controller.create_success_notice', model: '角色') }\n format.json { render action: 'show', status: :created, location: @role }\n else\n format.html { render action: 'new' }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def role_params\n {}\n end",
"def check_role_update\n unless current_user.is_admin?\n params[:user][:is_admin] = \"0\"\n params[:user][:is_moderator] = \"0\"\n params[:user][:is_sales] = \"0\"\n end\n end",
"def create\n @user = User.new(user_params)\n if @user.role == 'renter'\n @user.company_id == 24\n @user.title == 'renter'\n elsif @user.role = 'realtor'\n @user.company_id == 25\n @user.title == 'realtor'\n end\n @user.company_id = user_params[:company_id] if logged_in?\n @user.role = user_params[:role] if logged_in?\n @user.title = user_params[:title] if logged_in?\n @user.save\n respond_to do |format|\n if @user.save and !logged_in?\n log_in @user\n format.html { redirect_to @user, notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n elsif @user.save and logged_in?\n format.html { redirect_to @user, notice: 'User was successfully created.' }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new, notce: 'User was not created, please try again.' }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @screen = session.active_screen\n @form_content_reuse = params[:form_content_reuse]\n \n @role = Role.find(params[:id]) \n @role.user_ids.each{|u_id| User.find(u_id).update_attributes( :updated_at => Time.now ) }\n @role.users.clear\n @role.users = User.find(params[:role][:user_ids]) unless params[:role][:user_ids].nil?\n @role.user_ids.each{|u_id| User.find(u_id).update_attributes( :updated_at => Time.now ) }\n \n @role.update_attributes(params[:role])\n\n respond_to do |format|\n format.html # create.html.erb\n format.xml { render :xml => @role }\n end\n end",
"def create\n#\tlista za izbor role se prikazuje samo ako je logovan administrator\n\tif params[:user][:role_id]\n\t\t@role = Role.find(params[:user][:role_id])\n\telse\n\t\t@role = Role.find_by(name: 'registered user');\n\tend\n\t@user = @role.users.build(user_params)\n\t\n respond_to do |format|\n if @user.save\n\t\t \n\t\tset_session_for_user(@user)\n\t\t \n format.html { \n\t\t\tredirect_to addresses_url and return if session[:redirect_to_address]\n\t\t\tredirect_to @user, alert: \"User #{@user.name} was successfully created.\" \n\t\t}\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n\t\n end",
"def create\n @role = Role.new(role_params)\n\n # create a Array of action object from each line from table\n respond_to do |format|\n if @role.save\n @i = 0;\n @role.permissions = params[:role][:permission].values.map do |action_p|\n @p = Permission.new(action_p)\n if @p.new || @p.edit || @p.remove then @p.index = true end\n @p.session = @i\n @i = @i + 1\n @p\n end\n @role.save\n format.html { redirect_to management_roles_path }\n format.json { render json: @role, status: :created }\n else\n format.html { render :new }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n authorize! :assign_roles, @user if params[:user][:assign_roles]\n if @user.update_attributes(params[:user])\n redirect_to @user, notice: 'User was successfully updated.'\n else\n render \"edit\"\n end\n end",
"def create\n @role_user = RolesUser.new(params[:roles_user])\n\n\n\n\n respond_to do |format|\n if @role_user.save\n flash[:notice] = 'OK'\n format.html { redirect_to(@role_user) }\n format.xml { render :xml => @role_user, :status => :created, :location => @role_user }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @role_user.errors, :status => :unprocessable_entity }\n end\n end\n\n #respond_to do |format|\n # if @role_user.save\n # flash[:notice] = 'OK'\n # format.html { redirect_to(@role_user) }\n # format.xml { render :xml => @role_user, :status => :created, :location => @role_user }\n # $user_id= @role_user.user_id\n # @user = User.find(:all, :conditions => ['id =' +($user_id).to_s])\n #@user = User.find_by_sql(\"SELECT * FROM roles_users,users WHERE roles_users.user_id = users.id and users.id = \"+($user_id).to_s)\n #$user_id = params[:user_id]\n # for user in @user\n # user.activate\n # end\n # else\n # format.html { render :action => \"new\" }\n # format.xml { render :xml => @role_user.errors, :status => :unprocessable_entity }\n # end\n\n# $user_id= @role_user.user_id\n# @user = User.find(:all, :conditions => ['id =' +($user_id).to_s])\n #@user = User.find_by_sql(\"SELECT * FROM roles_users,users WHERE roles_users.user_id = users.id and users.id = \"+($user_id).to_s)\n #$user_id = params[:user_id]\n# for user in @user\n# user.activate\n\n# end\n\n\n end",
"def create\n @user = User.new(user_params)\n authorize! :create, @user\n authorize! :update, (user_params[:role_id].present? ? Role.find(user_params[:role_id]) : Role)\n\n respond_to do |format|\n if @user.save\n format.html { redirect_to @user, notice: I18n.translate('users.flash.create.success', user: @user.fullname) }\n format.json { render :show, status: :created, location: @user }\n else\n format.html { render :new, notice: I18n.translate('users.flash.create.fail') , status: :unprocessable_entity }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @role = @company.roles.new(safe_params)\n\n respond_to do |format|\n if @role.save\n format.html { redirect_to [@company, @role], notice: 'Role was successfully created.' }\n format.json { render :show, status: :created, location: @role }\n else\n format.html { render :new }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_role\n organisation_change_notice = nil\n @role = Role.find(params[:id])\n \n\tprev_organisation = @role.organisation\n \n\t# if the editing user does have permission to publish CRM, set the role status to 'Pending'\n\t@role.status_id = Status::PENDING.status_id if !PrivilegesHelper.has_permission?(@login, 'CAN_PUBLISH_CRM')\n\t\n\tis_contributor = @role.is_contributor\n\tis_contributor = params[:role][:is_contributor] if (@role.contributor.blank? && !params[:role][:is_contributor].blank?) || (! @role.contributor.blank? && @role.contributor_info_empty?)\n\t\n\t@role.send('is_contributor=', is_contributor)\n\tparams[:role][:is_contributor] = is_contributor\n\t\n\tif !params[:role][:role_type_id].blank? && @role.is_a_contributor? && !RoleType.contributor_role_types.include?(RoleType.find(params[:role][:role_type_id]))\n \n\t flash[:error] = \"An error has occured. You cannot change the role type to a non-contributor type if 'Contributor' field is checked.\"\n\t \n\t redirect_to :action => 'edit', :id => @role\n\t\n\telse\n\t\t\t\n\t if @role.update_attributes(params[:role])\n\n\t if ! @role.person_id.blank? && !params[:role][:organisation_id].blank?\n\t\t # create default_contactinfos\n\t\t # for every person's contactinfo and appropriate\n\t\t # organisation contactinfo\n\t\t @role.default_contactinfos_update\n\t end \n\t \t \n organisation_change_notice=\"\"\n # role has been assigned an organisation\n if ! @role.person_id.blank? && !params[:role][:organisation_id].blank?\n\t \n\t # destroy marketing categorisation of the person from the db\n\t # as person gets marketing categorisation of the organisation\n\t @role.role_categorizations.each do |rc|\n\t \t rc.destroy\n\t end\n\t \n organisation_change_notice = \"<br/> The organisation has been changed. Please check and update the contact information and make sure that it is consistent.\"\n end\n \n # delete default_contactinfo if organisation was previously\n # assigned to a role together with person but has been deleted\n if @role.organisation_id.blank? && !@role.person_id.blank? && !prev_organisation.blank?\n # do we need to default it to 'Person' or 'preferred' contact infos????\n \t # if yes, just call @role.default_contactinfos_update instead of the line below\n\t\t @role.delete_default_contactinfos(prev_organisation.organisation_id)\n end\n \n # update all role role_contactinfos for\n # solr indexing\n RoleContactinfo.index_objects(@role.role_contactinfos)\n \n # update all communications for\n # solr indexing\n Communication.index_objects(@role.communications)\n \n # update appropriate person if any\n # for solr idexing\n if ! @role.person.blank?\n @role.person.save\n end\n\n # destroy contributor record if 'is_contributor' of the role set to false\n @role.contributor.destroy_self if ! @role.contributor.blank? && ! @role.is_a_contributor?\n\n flash[:notice] = 'Role was successfully updated.' + organisation_change_notice\n redirect_to :action => 'edit', :id => @role\n else\n @person = @role.person\n @organisation = @role.organisation unless @role.organisation.blank?\n render :action => 'edit', :id => @role\n end\n \n\tend\n \n end",
"def new\n logger.debug(\"Role new\")\n @role=Role.new\n setup_role\n if params[:from] == 'organisation'\n @organisation = Organisation.find(params[:id])\n @from='organisation'\n else\n @person = Person.find(params[:id])\n @from='person'\n end\n # explicitly set role_type_id to nil\n # so that the first role type in the\n # list is not displayed in role type\n # drop-down\n @role.role_type_id = nil\n\t\n\tset_default_status(@role)\n \n end",
"def create\n\n @roles_and_permission = @roles.roles_and_permission.new(params[:roles_and_permission])\n \n respond_to do |format|\n if @roles_and_permission.save\n format.html { redirect_to [@roles, @roles_and_permission ], notice: 'Roles and permission was successfully created.' }\n format.json { render json: @roles_and_permission, status: :created, location: @roles_and_permission }\n else\n format.html { render action: \"new\" }\n format.json { render json: @roles_and_permission.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @user = User.find(params[:user_id])\n @roles = Role.all\n \n if current_user.is_admin?\n @user.roles.clear\n @roles.each do |role|\n if (params[:role][:role][role.rolename][:hasrole].to_s == 1.to_s)\n @user.roles << role\n end\n end\n else\n @roles.each do |role|\n if !role.admin_only\n if @user.has_role?(role.rolename)\n @user.roles.destroy(role)\n end\n if (params[:role][:role][role.rolename][:hasrole].to_s == 1.to_s)\n @user.roles << role\n end\n end\n end\n end\n \n flash[:notice] = I18n.t(\"user.success.roles_updated\")\n reload_page\n \n end",
"def update\n @roles = Role.all\n\n role = user_params[:role_id] ? user_params[:role_id] : @user.role_id\n if user_params[:password].empty?\n new_params = { :role_id => role,\n :first_name => user_params[:first_name],\n :last_name => user_params[:last_name],\n :email => user_params[:email],\n :telephone => user_params[:telephone] }\n else\n new_params = { :role_id => role,\n :first_name => user_params[:first_name],\n :last_name => user_params[:last_name],\n :email => user_params[:email],\n :telephone => user_params[:telephone],\n :password => user_params[:password],\n :password_confirmation => user_params[:password_confirmation] }\n end\n p = new_params\n respond_to do |format|\n if @user.update(p)\n format.html { redirect_to @user, notice: 'Benutzerdaten wurden aktualisiert.' }\n format.json { render :show, status: :ok, location: @user }\n else\n format.html { render :edit }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def check_roles\n if current_user.roles.include?(Role['owner'])\n return true\n elsif request.format.symbol == :json && params[:action] == 'index'\n return true\n end\n\n redirect_to root_path\n end",
"def create\n #Rails.logger.info \"-------->>>>>>>>>>>>>>>\"\n #Rails.logger.info authority_change_request_params\n #Rails.logger.info params\n @authority_change_request = AuthorityChangeRequest.new(authority_change_request_params)\n @authority_change_request.user = current_user\n @authority_change_request.src_authority = current_user.roles_mask\n\n respond_to do |format|\n if @authority_change_request.save\n #format.html { redirect_to @authority_change_request, notice: 'Authority change request was successfully created.' }\n @user_log = UserLog.create user: @authority_change_request.user, operation: 'user.log.op.role_change_request'\n format.html { redirect_to registrations_profile_path(current_user)}\n format.json { render :show, status: :created, location: @authority_change_request }\n else\n format.html { render :new }\n format.json { render json: @authority_change_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n if current_user == nil\n redirect_to root_path, :alert => 'Access Denied'\n \n else\n if (current_user.role == \"volunteer\") or (current_user.role == nil)\n redirect_to root_path, :alert => 'Access Denied'\n \n else\n build_resource({})\n set_minimum_password_length\n yield resource if block_given?\n respond_with self.resource\n end\n end\n end",
"def reenviar_usuario\n @bandera = false\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_reenviar_usuario = false\n @security_role_type = Security::RoleType.find_by(name: \"Reenviar Usuario\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_reenviar_usuario = true\n break\n elsif role_type.security_role_type.name == \"Consultar\"\n @bandera = true\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_reenviar_usuario = true\n end\n if @bandera == true\n elsif params[:action] == \"forget_username_list\" && @permiso_reenviar_usuario == false\n redirect_to root_path\n end\n return @permiso_reenviar_usuario\n end\n end\n end",
"def index\n submenu_item 'role-index'\n @roles = Role.paginate_by_sql(\"select t1.role_id id,t3.role_name,t3.description from\n (select a1.role_id,count(*) permission_num from roles_permissions a1\n where a1.permission_id in (select permission_id from roles_permissions where role_id =#{@current_user.role_id})\n group by a1.role_id) t1,\n (select role_id,count(*) permission_num from roles_permissions where role_id > 1 group by role_id) t2,\n roles t3\n where t1.permission_num = t2.permission_num and t1.role_id = t2.role_id\n and t1.role_id = t3.id \",:page => params[:page], :per_page => 30)\n\n end",
"def create\n @user = User.new(params[:user])\n @user.memberid = params[:user][:member_id]\n @user.roles = Role.find(params[:role_ids]) if params[:role_ids]\n\n if @user.save\n flash[:notice] = 'Registration successful.'\n redirect_to users_path\n else\n @roles = Role.find(:all)\n render :action => \"new\"\n end\n#\n# if @validateuser.length .nil?\n# @user.save\n# flash[:notice] = 'User was successfully created.'\n# format.html { redirect_to(user) }\n# format.xml { render :xml => @user, :status => :created, :location => @user }\n# else\n# flash[:notice] = 'User already Exists.'\n# format.html { render :action => \"new\" }\n# format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n# end\n\n\n# respond_to do |format|\n# if @user.save\n# flash[:notice] = 'User was successfully created.'\n# format.html { redirect_to(@user) }\n# format.xml { render :xml => @user, :status => :created, :location => @user }\n# else\n# format.html { render :action => \"new\" }\n# format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n# end\n# end\n end",
"def create\n @admin_cie_user = Registry::CieUser.new(admin_cie_user_params.except :admin_cie_role_ids)\n if admin_cie_user_params[:admin_cie_role_ids]\n @admin_cie_user.admin_cie_roles.delete_all\n admin_cie_user_params[:admin_cie_role_ids].each do |role|\n @admin_cie_user.admin_cie_roles << Admin::CieRole.find(role)\n end\n end\n @selected_role = @admin_cie_user.admin_cie_roles.map{|b| b.id}\n respond_to do |format|\n if @admin_cie_user.save\n format.html { redirect_to admin_cie_user_path(@admin_cie_user.id), notice: 'Usuario de registro fue creado exitosamente.' }\n format.json { render :show, status: :created, location: @admin_cie_user }\n else\n format.html { render :new }\n format.json { render json: @admin_cie_user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n params[:user][:role_ids] ||= []\n @user = User.find(params[:id])\n # @user.roles = Role.find(params[:role_ids]) if params[:role_ids]\n @session_user = session[:user]\n if @session_user.is_super_admin\n @roles = Role.find(:all)\n else\n @roles = Role.find(:all, :conditions => [\"id <> 1\"])\n end\n respond_to do |format|\n if @user.update_attributes(params[:user])\n flash[:notice] = 'User was successfully updated.'\n if permitted_to? :index, User.new\n format.html { redirect_to(@user) }\n else\n format.html { redirect_to(:root) }\n end\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n abilities = []\n client_application = current_user.client_application_id.to_s\n @abilities = Role.get_method_names.sort\n @role = Role.new(role_params)\n params[:role][:role_abilities].each do |ability|\n abilities << ability.to_sym\n end\n @role.role_abilities = [{\"action\"=> abilities, \"subject\"=>[:api]}]\n @role.client_application_id = client_application\n respond_to do |format|\n if @role.save\n format.html { redirect_to roles_path, notice: 'Role was successfully created.' }\n format.json { render :index, status: :created, location: @role }\n else\n format.html { render :new }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.new(user_params)\n cost = 10\n encrypted_password = ::BCrypt::Password.create(\"#{user_params[:password]}#{nil}\", :cost => cost).to_s\n @user.assign_attributes(:encrypted_password => encrypted_password, :is_deleted => false)\n @selected_role = user_params[:role_id]\n @user.transaction do\n begin\n if @user.save\n auto_assigned_managers_to_just_below_users\n assign_managers\n respond_to do |format|\n format.html { redirect_to @user, notice: 'Users was successfully created.' }\n format.json { render action: 'show', status: :created, location: @user }\n end\n else\n set_possible_roles\n respond_to do |format|\n if @selected_role.to_s != \"\" || user_params[:city_id] != \"\"\n populate_user_managers_value(@selected_role,user_params[:city_id])\n end\n format.html { render action: 'new' }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n rescue ActiveRecord::Rollback\n handle_rollback\n end\n end\n end",
"def usuarios\n redirect_to :action => \"roles\"\n end",
"def create\n ActiveRecord::Base.transaction do\n @role, hash = fill_role(params[:role])\n\n respond_to do |format|\n if @role.save\n flash[:notice] = 'Role was successfully created.'\n format.html { redirect_to(@role) }\n format.xml { render :xml => @role, :status => :created, :location => @role }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @role.errors, :status => :unprocessable_entity }\n end\n end\n end\n end",
"def make_admin\n authorize! @user\n @user.roles = @user.roles + ['admin']\n @user.save\n redirect_to @user, notice: t('user.made_admin', name: @user.username)\n end",
"def edit\n @user = User.find(params[:id])\n if can?(:update, @user)\n @roles = \"\"\n\n @user.roles.each do |role|\n if @roles == \"\"\n @roles += role.name\n else\n @roles += \", \" + role.name\n end\n end\n else\n @user = nil\n end\n\n respond_to do |format|\n format.json { render :json => @user } \n format.xml { render :xml => @user }\n format.html\n end\n\n rescue ActiveRecord::RecordNotFound\n respond_to_not_found(:json, :xml, :html)\n end",
"def render_create_success\n render json: @resource, include: [:roles], show_roles: true\n end",
"def test_set_role\n\n admin_session = cathy_admin_session\n \n pcb_input_gate = Role.find_by_name('PCB Input Gate')\n tracker_admin = Role.find_by_name('Admin')\n \n post(:set_role, { :id => pcb_input_gate.id }, admin_session)\n session_user = User.find(session[:user_id])\n assert_equal(pcb_input_gate.name, session_user.active_role.name)\n assert_redirected_to(:controller => 'tracker')\n\n post(:set_role, { :id => tracker_admin.id }, admin_session)\n session_user.reload\n assert_equal(tracker_admin.name, session_user.active_role.name)\n assert_redirected_to(:controller => 'tracker')\n\n end",
"def update\n respond_to do |format|\n if @manage_admin.update(manage_admin_params)\n\n # 保存角色信息\n roles_id=params[:roles]\n @manage_admin.roles_in_id=roles_id\n\n format.html { redirect_to @manage_admin, notice: '管理员信息更新成功.' }\n\n format.json { render :show, status: :ok, location: @manage_admin }\n else\n format.html { render :edit,notice:'修改失败'}\n format.json { render json: @manage_admin.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @company = Company.find(params[:company_id])\n if !can?(:manage, @company)\n raise CanCan::AccessDenied.new(\"Usted no puede administrar otra compañia\", :manage, @company)\n end\n @user = User.new(params[:user])\n @roles = Role.all\n role_ids = params[:role_ids] if params[:role_ids] \n role_ids ||= []\n @user.role_ids = role_ids\n @user.company_id = @company.id\n \n if !current_user.super_admin\n @user.super_admin = false\n end\n \n respond_to do |format|\n if @user.save\n format.html { redirect_to company_users_path(@company), notice: 'El usuario fue creado exitosamente.' }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n \n @roles_and_permission = @roles.roles_and_permission.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @roles_and_permission }\n end\n end",
"def create\n authorize! :create, @user, :message => \"No puedes tienes acceso a esta opcion.\"\n @user = User.new(user_params)\n @user.add_role(params[:role])\n @user.save\n redirect_to usuarios_path, notice: 'Usuario fue creado'\n \n end",
"def index\n authorize Role\n @roles = Role.all\n end",
"def create\n @role = Role.new(params[:role])\n\n @perms = params[:permissions[\"permissions\"]]\n if @perms != nil\n @permissions = @perms[\"permissions\"]\n end\n #logger.debug \"PERMISSIONS: #{@permissions.inspect}\"\n if @permissions == nil\n @role.read = 0\n @role.write = 0\n @role.execute = 0\n end\n if @permissions != nil\n if @permissions.include?(\"read\")\n @role.read = 1\n else\n @role.read = 0\n end\n if @permissions.include?(\"write\")\n @role.write = 1\n else\n @role.write = 0\n end\n if @permissions.include?(\"execute\")\n @role.execute = 1\n else\n @role.execute = 0\n end\n end\n \n respond_to do |format|\n if @role.save\n format.html { redirect_to @role, notice: t(:role_created) }\n format.json { render json: @role, status: :created, location: @role }\n else\n format.html { render action: \"new\" }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_role_to_project\n @project_role = ProjectRole.new(params[:project_role])\n\n respond_to do |format|\n if @project_role.save\n format.html { redirect_to assign_roles_project_path(@project_role.project) , notice: 'New project role was successfully added.' }\n else\n format.html { render assign_roles_project_path(@project_role.project) }\n end\n end\n end",
"def update\n @user = User.find_by_username params[:username]\n raise ActiveRecord::RecordNotFound unless @user\n @user_role = Role.find_by_name('User')\n @roles = Role.all.order(:name)\n\n if params[:user][:current_password] # the user is changing their password\n command = ChangePasswordCommand.new(\n @user, params[:user][:current_password],\n params[:user][:password],\n params[:user][:password_confirmation],\n current_user,\n request.remote_ip)\n begin\n command.execute\n rescue ValidationError\n render 'edit'\n rescue => e\n flash['error'] = \"#{e}\"\n render 'edit'\n else\n flash['success'] = @user == current_user ?\n 'Your password has been changed.' :\n \"#{@user.username}'s password has been changed.\"\n keep_flash\n redirect_to edit_user_url(@user)\n end\n elsif params[:user][:desired_institution_id] # the user is changing their institution\n new_institution = Institution.find(params[:user][:desired_institution_id])\n command = JoinInstitutionCommand.new(@user, new_institution,\n current_user, request.remote_ip)\n begin\n command.execute\n rescue ValidationError\n render 'edit'\n rescue => e\n flash['error'] = \"#{e}\"\n render 'edit'\n else\n if @user.institution == new_institution # already joined, which means an admin did it\n flash['success'] = \"#{@user.username} is already a member of \"\\\n \"#{new_institution.name}.\"\n else\n flash['success'] = \"An administrator has been notified and will \"\\\n \"review your request to join #{new_institution.name} momentarily,\"\n end\n keep_flash\n redirect_to dashboard_path\n end\n else # the user is changing their basic info\n command = UpdateUserCommand.new(@user, user_update_params, current_user,\n request.remote_ip)\n begin\n command.execute\n rescue ValidationError\n # Gather institutions for the institution select menu.\n set_institutions_ivar\n render 'edit'\n rescue => e\n flash['error'] = \"#{e}\"\n\n # Gather institutions for the institution select menu.\n set_institutions_ivar\n render 'edit'\n else\n flash['success'] = @user == current_user ?\n 'Your profile has been updated.' :\n \"#{@user.username}'s profile has been updated.\"\n keep_flash\n redirect_to edit_user_url(@user)\n end\n end\n end",
"def update\n @member = Member.find(params[:id])\n# @user = @member.user.id\n\n @roles = Role.find(:all)\n\nif params[:commit] == \"Update Member\"\n checked_roles = []\n checked_params = params[:role_list] || []\n for check_box_id in checked_params\n role = Role.find(check_box_id)\n if not @member.user.roles.include?(role)\n @member.user.roles << role\n end\n checked_roles << role\n end\n missing_roles = @roles - checked_roles\n for role in missing_roles\n if @member.user.roles.include?(role)\n @member.user.roles.delete(role)\n end\n end\nend\n\n respond_to do |format|\n if @member.update_attributes(params[:member]) \n if params[:commit] == \"Update Member\" \n format.html { redirect_to @member, :notice => 'Member was successfully updated. #{undo_link}' }\n format.json { head :ok }\n else #params[:commit] == \"Update Account\" \n format.html { redirect_to account_path(@member), :notice => 'Account was successfully updated.' }\n format.json { head :ok }\n end\n else\n if params[:commit] == \"Update Member\" \n format.html { redirect_to account_path(@member) }#render :action => \"edit\" }\n format.json { render :json => @member.errors, :status => :unprocessable_entity }\n else #params[:commit] == \"Update Account\" \n format.html { redirect_to account_path(@member) }#render :action => \"edit\" }\n format.json { render :json => @member.errors, :status => :unprocessable_entity }\n end\n end\n end\n end",
"def create\n params[:user][:ip_address] = request.remote_ip\n params[:user][:last_login] = DateTime.now\n if !params[:user][:floater].nil? then\n floater = params[:user][:floater]\n params[:user].delete(\"floater\")\n else\n floater = nil\n end\n @user = User.new(params[:user])\n if @user.save then\n session[:user_id] = @user.id\n params[:user_role] = Hash.new\n params[:user_role][:user_id] = @user.id\n params[:user_role][:role_id] = Role.find_by_name(\"USER\").id\n @user_role = UserRole.new(params[:user_role])\n @user_role.save\n if floater.nil? then\n redirect_to(@user)\n else\n render :partial => \"index/floater_thanks\"\n end\n #redirect_to :controller => \"challenges\", :action => \"index\"\n else\n if floater.nil? then\n render :action => \"new\"\n else\n render :partial => \"index/floater\"\n end\n end\n end",
"def update\n # add a blank set of ids, so that we can actually remove roles\n user_hash = {:role_ids => []}\n # merge in what the user has selected\n user_hash = (params[:user].has_key?(:role_ids) ? params[:user] : params[:user].merge({:role_ids => []})) if params[:user]\n # locate the admin role id\n admin_role_id = Role.find_by_title('administrator').id\n # don't let the current user remove their administrative role\n if @object == current_user && @object.is_administrator? && !user_hash[:role_ids].include?(admin_role_id.to_s)\n user_hash[:role_ids] = user_hash[:role_ids] << admin_role_id\n flash[:warning] = 'You cannot remove the administrator role from yourself.'\n end\n # check for new email\n if !params[:user][:new_email].blank? && !params[:user][:email].eql?(params[:user][:new_email])\n params[:user][:email] = params[:user][:new_email]\n params[:user][:new_email] = nil\n end\n # execute the standard CrudController update\n super\n end",
"def update\n\t\tauthorize! :update, AsignacionRol\n @asignacion_rol_viejo = AsignacionRol.find(@asignacion_rol.id)\n @asignacion_rol_viejo.esActual=false\n @asignacion_rol_new = AsignacionRol.new\n @asignacion_rol_new.proyecto = @asignacion_rol_viejo.proyecto\n @asignacion_rol_new.rol_id = params[:asignacion_rol][:rol_id]\n @asignacion_rol_new.esActual=true\n @asignacion_rol_new.usuario=@asignacion_rol.usuario\n if unica(@asignacion_rol_new.usuario_id,@asignacion_rol_new.proyecto_id, @asignacion_rol_new.rol_id) == true\n @asignacion_rol_new.save\n\t respond_to do |format|\n\t if @asignacion_rol_viejo.save\n sesion= Sesion.find_by(usuario_id: current_usuario.id, fechaFin: nil)\n Transaccion.create!(\n\t\t descripcion: \"Actualizar asociación rol #{@asignacion_rol.rol.nombre} al usuario #{@asignacion_rol.usuario.nombreUsuario} del proyecto #{@asignacion_rol.proyecto.nombre} : actual = #{ t @asignacion_rol.esActual.to_s}\",\n\t\t sesion_id: sesion.id ,\n\t\t proyecto_id: @asignacion_rol.proyecto.id)\n\t\tformat.html { redirect_to :controller => 'asignacion_roles', :action => 'index', :proyecto_id => @asignacion_rol.proyecto.id \n\t\tflash[:success] = 'Asignacion rol fue actualizado satisfactoriamente.' }\n\t\tformat.json { render :show, status: :ok, location: @asignacion_rol }\n\t else\n\t\tformat.html { render :edit }\n\t\tformat.json { render json: @asignacion_rol.errors, status: :unprocessable_entity }\n\t end\n\t end\n else\n\t respond_to do |format|\n\n\t\t format.html { redirect_to :controller => 'asignacion_roles', :action => 'index', :proyecto_id => @asignacion_rol.proyecto.id\n\t\t flash[:danger] = 'El usuario ya se encuentra asignado' } \n\t end\n end\n end",
"def edit\n @invite_user = User.where(:invite_token => params[:id], :status => \"invitee\").first\n @role = @invite_user.roles.first\n @inviter = User.where(:id => @role.invite_user_id).first\n @course = @role.course\n @user = User.new(params[:user])\n store_location\n end",
"def update\n #respond_to do |format|\n\t\n\temployee = Employee.find_by id: @employee.id\n\tuser = User.find_by id: @employee.user_id\n\temployee.firstname = params[:firstname]\n\temployee.lastname = params[:lastname]\n\tuser.roles_mask = params[:Role]\n if(user.roles_mask != nil)\n if employee.save && user.save\n\t@employee = Employee.find_by user_id: current_user.id\n\trender \"index\"\n else\n\tflash[:alert] = \"Invalid data\"\n\trender \"edit\"\n end\n else\n\tif employee.save\n\t@employee = Employee.find_by user_id: current_user.id\n\trender \"index\"\n else\n\tflash[:alert] = \"Invalid data\"\n\trender \"edit\"\n end\n end\n end",
"def check_user_role \t \n redirect_to root_path unless current_user.roles.first.name == \"empleado\" or current_user.roles.first.name == \"supervisor\"or current_user.roles.first.name == \"admin\" \n end",
"def update\n @user = User.find(params[:user_id])\n @role = Role.find(params[:id])\n unless @user.has_role?(@role.name)\n @user.roles << @role\n end\n redirect_to :action => 'index'\n end",
"def contributor_details_update\n logger.debug(\"Role contributor_details_update\")\n \n show_params(params)\n \n @role = Role.find(params[:id])\n @role.contributor = Contributor.new if !@role.contributor\n @contributor = @role.contributor\n @contributor.updated_by = get_user.login_id\n\n @person = @role.person\n @organisation = @role.organisation\n \n if @contributor.update_attributes(params[:contributor])\n # updating solr indexes\n\t @role.save\n # if person is associated with the contributor role\n if !@person.blank?\n @person.updated_by = get_user.login_id\n @person.year_of_birth = params[:date][:year_of_birth] unless params[:date].blank?\n @person.year_of_death = params[:date][:year_of_death] unless params[:date].blank?\n if @person.update_attributes(params[:person])\n \n # update all role role_contactinfos for\n # solr indexing\n RoleContactinfo.index_objects(@role.role_contactinfos)\n\t\t \n flash[:notice] = 'Contributor record was successfully updated'\n redirect_to :action => 'contributor_details', :id => @role.role_id \n else\n setup_contributor\n flash[:notice] = 'Contributor record was not able to be updated'\n render :action => 'contributor_details'\n end\n \n else\n # if organisation is associated with the contributor role\n # and person is not\n if !@organisation.blank?\n @organisation.updated_by = get_user.login_id\n @organisation.year_of_establishment = params[:date][:year_of_establishment] unless params[:date].blank?\n if @organisation.update_attributes(params[:organisation])\n \n # update all role role_contactinfos for\n # solr indexing\n RoleContactinfo.index_objects(@role.role_contactinfos)\n\t\t\t\n flash[:notice] = 'Contributor record was successfully updated'\n redirect_to :action => 'contributor_details', :id => @role.role_id\n else\n setup_contributor\n flash[:notice] = 'Contributor record was not able to be updated'\n render :action => 'contributor_details'\n end\n end\n end\n \n else\n setup_contributor\n flash[:notice] = 'Contributor record was not able to be updated'\n render :action => 'contributor_details'\n end\n end",
"def update\n respond_to do |format|\n if @role.update(role_params)\n @role.permissions = params[:role][:permission].map do |key, value|\n index = value[\"index\"] == \"1\" ? true : false\n index = true if value[\"new\"] == \"1\" || value[\"edit\"] == \"1\" || value[\"remove\"] == \"1\"\n Permission.update(value[\"id\"], :index => index, :new => value[\"new\"], :edit => value[\"edit\"], :remove => value[\"remove\"], :import => value[\"import\"])\n end\n format.html { redirect_to management_roles_path }\n format.json { render json: @role, status: :ok }\n else\n format.html { render :edit }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\t\t@role = Role.new(params[:role])\n\t\trespond_to do |format|\n\t\t\tif fonct_new_dup?\n\t\t\t\tobject_orig=Role.find(params[:object_orig_id])\n\t\t\tst = @role.create_duplicate(object_orig)\n\t\t\telse\n\t\t\tst = @role.save\n\t\t\tend\n\t\t\tif st\n\t\t\t\tflash[:notice] = t(:ctrl_object_created,:typeobj =>t(:ctrl_role),:ident=>@role.title)\n\t\t\t\tparams[:id]=@role.id\n\t\t\t\tshow_\n\t\t\t\tformat.html { render :action => \"show\" }\n\t\t\t\tformat.xml { render :xml => @role, :status => :created, :location => @role }\n\t\t\telse\n\t\t\t\tflash[:error] = t(:ctrl_object_not_created,:typeobj =>t(:ctrl_role),:ident=>@role.title, :msg => nil)\n\t\t\t\tformat.html { render :action => \"new\" }\n\t\t\t\tformat.xml { render :xml => @role.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def create\n # rails 3 bug\n params[:user][:roles].reject!(&:blank?)\n @user = User.new(params[:user])\n respond_to do |format|\n if @user.save\n format.html { redirect_to users_url, notice: I18n.t(:new_user_notify, name: @user.name, role: @user.role_str ) }\n format.json { render json: @user, status: :created, location: @user }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user.errors, status: :unprocessable_entity }\n end\n end\n end",
"def redirect\n if params[:role] == \"restaurant\"\n redirect_to :action => \"new\", :invitation => { :first_name => params[:first_name], :last_name => params[:last_name], :email => params[:email] }\n elsif params[:role] == \"media\"\n redirect_to :controller => \"mediafeed/media_users\", :action => \"new\", :user => { :first_name => params[:first_name], :last_name => params[:last_name], :email => params[:email] }\n end\n end",
"def new\n @company = Company.find( params[:company_id])\n @user = @company.users.build\n @roles = ROLE\n end",
"def create\n @role = get_role\n flash[:notice] = t('create.success', :scope => get_i18n_scope) if @role.save and not request.xhr?\n respond_with @event, @role, :location => {:action => 'index'}\n end",
"def contributor_details\n logger.debug(\"Role contributor_details\")\n @role = Role.find(params[:id])\n #@role.updated_by= get_user.login_id\n \n @person = @role.person\n @organisation = @role.organisation\n #if !@person.is_contributor?\n # flash[:notice] = 'Please make this person a contributor before editing contributor information'\n # redirect_to :action => 'edit', :id => @person\n # return\n #end\n setup_contributor\n end",
"def update\n @user = User.find(params[:id])\n params[:user].delete(:id)\n if params[:user][:password].blank? or params[:user][:password_confirmation].blank?\n sign_in_user = true\n else\n sign_in_user = false\n end\n params[:user].delete(:password) if params[:user][:password].blank?\n params[:user].delete(:password_confirmation) if params[:user][:password_confirmation].blank?\n params[:user].delete(:avatar) if params[:user][:avatar].blank?\n if can? :update, Userrole\n params[:foo] = params[:user][:userrole_ids]\n params[:user][:userrole_ids].each do |new_userrole_id|\n unless new_userrole_id.blank?\n new_userrole = Userrole.find(new_userrole_id)\n @user.userroles << new_userrole unless @user.userroles.include?(new_userrole)\n end\n end\n @user.save\n params[:user].delete(:userrole_ids)\n end\n respond_to do |format|\n if @user.update_attributes(params[:user])\n pronoun = ' ' + @user.username\n pronoun_possessive = @user.username + \"'s'\"\n redirect_notice = ' will need to log in again.'\n if current_user?(@user)\n pronoun = ' You'\n pronoun_possessive = \"Your\"\n end\n if sign_in_user\n sign_in @user if current_user?(@user)\n pronoun = ''\n redirect_notice = ''\n end\n format.html { redirect_to(@user, :notice => pronoun_possessive + ' information has been updated.' + pronoun + redirect_notice) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n #raise params.inspect\n #login_id = params[:secretary].delete(:login_id)\n # clinic_id = params[:secretary].delete(:clinic_id)\n gender_id = params[:neuropsychologist].delete(:gender_id)\n \n email = params[:neuropsychologist][:login_attributes].delete(:email)\n pass = params[:neuropsychologist][:login_attributes].delete(:password)\n params[:neuropsychologist][:login_attributes].delete(:password_confirmation)\n params[:neuropsychologist].delete(:login_attributes)\n @neuropsychologist = Neuropsychologist.new(params[:neuropsychologist])\n login = Login.find_by_email(email)\n login.update_attribute(:password,pass)\n @neuropsychologist.login_id = login.id\n @neuropsychologist.gender_id = gender_id\n # manager = Manager.first(:conditions => \"login_id = #{current_login.id}\")\n @neuropsychologist.clinic_id = params[:clinic_id]\n #@neuropsychologist.active = true <- Agora utiliza-se deleted_at para soft deletion\n respond_to do |format|\n if @neuropsychologist.save\n\n Login.find(@neuropsychologist.login.id).add_role :neuropsychologist\n format.html { redirect_to new_login_session_path, notice: 'Conta criada com sucesso. Por favor entre com as credenciais introduzidas.'}\n format.json { render json: @neuropsychologist, status: :created, location: @neuropsychologist }\n else\n format.html { render action: \"new\" }\n format.json { render json: @neuropsychologist.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @show_element=\"admin\"\n @action=\"create2\"\n @rolesgroup = RolesGroup.new\n # set role_name for role select list\n @rolesgroup.role_name = params[:role_name]\n @role = Role.find_by_role_name(params[:role_name])\n end",
"def create\n @user = User.new(params[:user])\n @roles = [Role.find_by_name(params[:user]['role'])]\n @user.person.attributes = (params[:person])\n @user.roles = @roles\n respond_to do |format|\n if @user.save\n flash[:notice] = 'User was successfully created.'\n format.html { redirect_to admin_users_url }\n format.xml { render :xml => @user, :status => :created, :location => @user }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @user.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @user_role = UserRole.new(params[:user_role])\n\n respond_to do |format|\n if @user_role.save\n flash[:notice] = 'UserRole was successfully created.'\n format.html { redirect_to([:admin, @user_role]) }\n else\n format.html { render :action => \"new\" }\n end\n end\n end",
"def index\r\n \r\n\r\n if params[:sivic_celula_id]\r\n if current_user.role == 'ADMINISTRADOR'\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_igreja_id: current_user.sivic_pessoa.sivic_igreja_id, id: params[:sivic_celula_id]}).paginate(:page => params[:page], :per_page => 10)\r\n end\r\n else\r\n if current_user.role == 'LIDER_DE_CELULAS'\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_pessoa_id: current_user.sivic_pessoa.id}).paginate(:page => params[:page], :per_page => 10)\r\n else\r\n @sivic_participantecelulas = SivicParticipantecelula.joins(:sivic_celula).where(sivic_celulas: {sivic_igreja_id: current_user.sivic_pessoa.sivic_igreja_id}).paginate(:page => params[:page], :per_page => 10)\r\n end\r\n end\r\n end",
"def index\n @roles = Role.all.paginate(:page => params[:page], :per_page => 10).order('id desc')\n \n \n if params[:count]\n params[:count]\n else\n params[:count] = 10\n end\n \n if params[:page]\n page = params[:page].to_i\n else\n page = 1\n end\n \n if params[:per_page].present?\n # perpage = params[:per_page]\n @per_page = params[:per_page] || Role.per_page || 10\n @roles = Role.paginate( :per_page => @per_page, :page => params[:page])\n else\n perpage = 10\n end\n @per_page = params[:per_page] || Role.per_page || 10\n page = if params[:page]\n params[:page].to_i\n else\n 1\n end\n \n\n \n# Filter by human part\nif params[:name]\n @name = params[:name]\n logger.info \"Then part is #{@name.inspect}\"\n @roles = Role.where(name: @name).paginate( :page => params[:page], :per_page => 10).order('id desc') \nend\n\n\n \n# \n # per_page = 5\n# \n # offset = (page - 1) * per_page\n # limit = page * per_page\n # @array = *(offset...limit)\n\n\n # if params[:search_value] && params[:search_value].strip != ''\n# \n # if params[:search_param] == 'firstname'\n # logger.info \"the code comes to if firstname............\"\n # @contacts = Contact.firstname_search(params[:search_value].strip).paginate(page: params[:page], per_page: params[:count]).order('ID asc')\n# \n # elsif params[:search_param] == 'lastname'\n # logger.info \"the code comes to elsif lastname.............\"\n # @contacts = Contact.lastname_search(params[:search_value].strip).paginate(page: params[:page], per_page: params[:count]).order('ID asc')\n# \n # else\n # logger.info \"the code comes to the else....\"\n # @contacts = Contact.paginate(page: params[:page], per_page: params[:count]).order('ID desc')\n # @search_json = []\n # end\n# \n # elsif params[:search_param] == 'date'\n # logger.info \"the code comes to elsif date.............\"\n# \n # start = (params[\"start_date\"] + \" \" + \"0:00:00\")# Time.zone.parse(params[\"start_date\"].to_s + \" \" + \"0:00:00\").utc # params[\"start_date\"].to_s + \"0:00:00\"\n # ended = params[\"end_date\"] + \" \" + (\"23:59:59\") # Time.zone.parse(params[\"end_date\"].to_s + \" \" + \"23:59:59\").utc # params[\"end_date\"].to_s + \"23:59:59\"\n # @contacts = Contact.search_date(start,ended).paginate(page: params[:page], per_page: params[:count]).order('ID asc')\n# \n# \n # end\n # p \"JSON ARRAY: #{@search_json}\"\n# \n \n respond_to do |format|\n logger.info \"what is the url calling this??: ans #{request.referer}\"\n # format.js\n format.html\n format.csv { send_data @roles.to_csv(options = {}, page, perpage)}\n format.xls { send_data @roles.to_csv(options={col_sep: \"\\t\"}, page, perpage)}\n end\n end",
"def change_member_status # :norobots:\n pass_query_params\n if @project = find_or_goto_index(Project, params[:id].to_s) and\n @candidate = find_or_goto_index(User, params[:candidate])\n if !@project.is_admin?(@user)\n flash_error(:change_member_status_denied.t)\n redirect_to(:action => 'show_project', :id => @project.id,\n :params => query_params)\n elsif request.method == :post\n user_group = @project.user_group\n admin_group = @project.admin_group\n admin = member = :remove\n case params[:commit]\n when :change_member_status_make_admin.l\n admin = member = :add\n when :change_member_status_make_member.l\n member = :add\n end\n set_status(@project, :admin, @candidate, admin)\n set_status(@project, :member, @candidate, member)\n redirect_to(:action => 'show_project', :id => @project.id,\n :params => query_params)\n end\n end\n end",
"def permiso_anular\n if user_signed_in?\n unless current_user.employee.nil?\n @permiso_anular = false\n @security_role_type = Security::RoleType.find_by(name: \"Anular\").name\n current_user.employee.security_profile.security_role.security_role_menus.each do |security_role_menu| \n if security_role_menu.security_menu.controller == params[:controller] \n security_role_menu.security_role_type_menus.each do |role_type|\n if @security_role_type == role_type.security_role_type.name\n @permiso_anular = true\n break\n end\n end\n end\n end\n if current_user.username == \"aadmin\"\n @permiso_anular = true\n end\n if params[:action] == \"anular\" && @permiso_anular == false\n redirect_to root_path\n end\n return @permiso_anular\n end\n end\n end",
"def filter_roles\n if params[:user] && params[:user][:roles]\n params[:user][:roles] = params[:user][:roles].map(&:to_i) & current_user.assignable_roles\n end\n end",
"def create_manager\n #@event_user = EventUser.new(event_user_params)\n @event_user = EventUser.new(event: @event)\n authorize @event_user\n\n @event_user.user = User.find(permitted_attributes(@event_user)[:user])\n @event_user.role = EventUser.roles[:organizador]\n\n respond_to do |format|\n if @event_user.save\n format.html { redirect_to admin_event_users_path(:code => @event.code),\n notice: 'Paticipante foi CRIADO com sucesso.' }\n format.json { render :show, status: :created, location:[:admin, @event_user] }\n else\n format.html {\n flash[:Error] = \"O Organizador já está inscrito no evento.\"\n redirect_to new_manager_admin_event_users_path(:code => @event.code)}\n format.json { render json: [:admin, @event_user].errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_permissions\r\n @role = Role.find(params[:id])\r\n end",
"def update\n # this action is not provided for partyroles\n end",
"def new\n\t\t@roles_new = Role.all\n\t\tif can?(:update, Role)\n\t\t\t@roles_new = Role.all\n\t\tend\n\n @user = User.new if can?(:create, User)\n respond_to do |format|\n format.json { render :json => @user } \n format.xml { render :xml => @user }\n format.html\n end\n end",
"def update\n if !grant_access(\"alter_roles\", current_user)\n head(403)\n end\n @role.user_id = current_user.id\n @role.start_point = false if !params[:role][:start_point]\n respond_to do |format|\n if @role.update(role_params)\n format.html { redirect_to @role, notice: 'Role was successfully updated.' }\n format.json { render :show, status: :ok, location: @role }\n else\n format.html { render :edit }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end",
"def setUserRole\n if !session[:user_id]\n flash[:notice] = \"Need to login first\"\n redirect_to :action=> 'login'\n end\n\n roomname = params[:setuserrole][\"roomname\"]\n collectionname = (params[:setuserrole][\"collectionname\"].length ==0)? nil:params[:setuserrole][\"collectionname\"]\n nodename = (params[:setuserrole][\"nodename\"].length==0)? nil:params[:setuserrole][\"nodename\"]\n userid = params[:setuserrole][\"userid\"]\n role = params[:setuserrole][\"role\"]\n\n begin\n am = session[:am]\n acc = Account.find_by_username(session[:user_id])\n if(acc.nil?)\n flash[:notice] = \"Need to login first\"\n redirect_to :action=> 'login'\n return\n end\n am.keepalive(acc.username, acc.password)\n\n myroominfo = am.getRoomInfo(roomname)\n if(!myroominfo.nil? and myroominfo.isConnected == true)\n result = am.setUserRole(roomname, userid, role, collectionname, nodename)\n flash[:result] = \"setUserRole result success: \" + result + \" \" + acc.roomURL + \" \" + userid + \" \" + role\n else\n result = \"Room is shutdown, this feature only available when room is started.\"\n flash[:notice] = result\n end\n\n redirect_to :action => 'accountManager'\n rescue Exception => msg\n flash[:notice] = msg\n end\n\n end"
] | [
"0.66709185",
"0.6665142",
"0.66624296",
"0.6641296",
"0.6633384",
"0.6523137",
"0.6506499",
"0.6501287",
"0.6453047",
"0.64433265",
"0.64421546",
"0.64162004",
"0.6389307",
"0.63650715",
"0.6329809",
"0.6295988",
"0.6286607",
"0.62563795",
"0.6228856",
"0.62027156",
"0.6197659",
"0.61883825",
"0.6178014",
"0.61739635",
"0.6169028",
"0.6164874",
"0.6157149",
"0.6152366",
"0.61386967",
"0.61337036",
"0.61271363",
"0.6126474",
"0.6115537",
"0.6100426",
"0.6099005",
"0.6097277",
"0.6084806",
"0.6077385",
"0.6075189",
"0.60561335",
"0.60380787",
"0.60232973",
"0.6010378",
"0.60092545",
"0.6008884",
"0.60025436",
"0.59996253",
"0.5999348",
"0.5998863",
"0.5991561",
"0.59873265",
"0.597316",
"0.59711015",
"0.5970014",
"0.5966242",
"0.5965942",
"0.59553355",
"0.59551597",
"0.5948404",
"0.5931848",
"0.5926081",
"0.5922631",
"0.59153616",
"0.591436",
"0.5913768",
"0.5908413",
"0.5891984",
"0.5887374",
"0.58853376",
"0.58852684",
"0.5880536",
"0.58792853",
"0.5877072",
"0.5875644",
"0.5874127",
"0.5873721",
"0.58688754",
"0.586825",
"0.5865168",
"0.58641714",
"0.58619285",
"0.5861585",
"0.5850405",
"0.5847355",
"0.5846548",
"0.5843883",
"0.58424133",
"0.58344686",
"0.58224756",
"0.58155",
"0.5810084",
"0.58072776",
"0.5804052",
"0.5803223",
"0.5802976",
"0.5802495",
"0.5797016",
"0.5796982",
"0.579429",
"0.57922995",
"0.57914454"
] | 0.0 | -1 |
this method should take in an argument of the game board and use the turn_count to determine whether it is X's or O's turn | def current_player(board)
current_pla = turn_count(board)
if current_pla %2==0
return "X"
else
return "O"
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def turn_count(board)\n turn_count = 0\n \n board.each do |counter|\n if counter == \"X\" || counter == \"O\"\n turn_count += 1\n end\n end\n \n turn_count\n end",
"def turn_count\n counter = 0\n @board.each do |index_taken|\n if index_taken == \"X\" || index_taken == \"O\"\n counter += 1\n end\n end\n counter\n end",
"def turn_count(board)\n counter = 0\n board.each do |count_turn|\n if count_turn == \"X\" || count_turn == \"O\"\n counter += 1\n end\n end\n counter\nend",
"def turn_count\n counter = 0\n @board.each do |position|\n\n if position == \"X\" || position == \"O\"\n counter += 1\n end\n\n end\n counter\n end",
"def turn_count #counts occupied positions!\n turn = 0 #start at position 0.\n @board.each do |index| #iterating through each spot on the board. Is it occupied?\n if index == \"X\" || index == \"O\" #if there is an X or O in that spot,\n turn += 1 #then you must turn!\n end\n end\n return turn #if there is not an X or O in that spot, take your turn and put your X or O there.\nend",
"def turn_count(board)\n turn = 0\n board.each do |position|\n if position == \"X\" || position == \"O\"\n turn += 1\n end\n end\n return turn\nend",
"def turn_count(board)\n turn = 0\n board.each do |position|\n if position == \"X\" || position == \"O\"\n turn += 1\n end\n end\n return turn\nend",
"def turn_count(board)\n counter = 0\n board.each do |count|\n if count == \"X\" || count == \"O\"\n counter += 1\n end\nend\ncounter\nend",
"def turn_count\r\n turns_so_far = 0\r\n @board.each do |arr_item|\r\n if arr_item == \"X\" || arr_item ==\"O\"\r\n turns_so_far += 1\r\n end\r\n end\r\n return turns_so_far\r\n end",
"def turn_count(board)\n count=0\n board.each do |pos|\n if(pos==\"X\"||pos==\"O\")\n count+=1\n end\n end\n count\nend",
"def turn_count(board)\n turn_counter = 0\n board.each do |turn|\n if turn == \"X\" || turn == \"O\"\n turn_counter += 1\n end\n end\n turn_counter\nend",
"def turn_count(board)\n counter = 0\n board.each do |box|\n if box == \"X\" || box == \"O\"\n counter += 1\nend\nend\n return counter\nend",
"def turn_count(board)\ncounter = 0\nboard.each do |board_x|\n if board_x == \"X\" || board_x == \"O\"\ncounter += 1\n end\n end\nreturn counter\nend",
"def turn_count(board)\n num_of_turns = 0\n board.each do |spot|\n if (spot == 'X' || spot == 'O')\n num_of_turns += 1\n end\n end\n num_of_turns\nend",
"def number_of_turns_taken(board)\r\n counter = 0\r\n board.each do |token|\r\n if token == \"X\" || token ==\"O\"\r\n counter +=1\r\n end\r\n end\r\n counter\r\nend",
"def turn_count (board)\n \n turn = 0\n \n board.each do |cell| # cell = element\n if cell == \"X\" || cell == \"O\"\n turn += 1\n else \n turn += 0\n end\n end\n \n return turn\n\nend",
"def turn_count(board)\n counter = 0\n board.each do |move|\n if move == \"X\" || move == \"O\"\n counter += 1\nend\nend\ncounter\nend",
"def turn_count\n @board.count{|token| token == \"X\" || token == \"O\"}\n end",
"def turn_count(board)\n count = 0\n board.each do |value|\n if value ==\"X\" || value == \"O\"\n count +=1\n end\n end\n return count\nend",
"def turn_count\n\t\t@board.count{|token| token == \"X\" || token == \"O\"}\n\tend",
"def turn_count\n @board.count{|token| token == \"X\" || token == \"O\"} # << tweaked\n end",
"def turn_count(board)\n turn_counter = 0\n board.each do |spot|\n if spot == \"X\" || spot.upcase == \"O\"\n turn_counter +=1\n end\n end\n return turn_counter\nend",
"def turn_count\n turns = 0\n @board.each do |position|\n if position == \"X\" || position == \"O\"\n turns += 1\n end\n end\n return turns\n end",
"def turn_count(board)\n turns = 0\n board.each do |spot|\n if spot==\"X\" || spot==\"O\"\n turns += 1\n end\n end\n return turns\nend",
"def turn_count(board)\n count = 0\n board.each do |cell|\n if cell == \"X\" || cell == \"O\"\n count += 1\n end\n end\n count\nend",
"def turn_count\n counter = 0\n @board.each do |positions|\n if positions == \"X\" || positions == \"O\"\n counter += 1\n end\n end\n counter.to_i\n end",
"def turn_count\n turns = 0\n @board.each do |position| #can use .count here: board.count{|token| token == \"X\" || token == \"O\"}\n if position == \"X\" || position == \"O\"\n turns += 1\n end\n end\n turns\n end",
"def turn_count(board)\n counter = 0\n board.each do |index|\n if (index == \"X\" or index == \"O\")\n counter+=1\n end\n end\n return counter\nend",
"def turn_count\n turns = 0\n @board.each do |spot|\n if spot==\"X\" || spot==\"O\"\n turns += 1\n end\n end\n return turns\n end",
"def turn_count(board)\n count=0\n board.each do |position|\n if(position==\"X\"||position==\"O\")\n count+=1\n end\n end\n return count\nend",
"def turn_count()\n turns = 0\n \n @board.each do |position|\n if position.eql?(\"X\") || position.eql?(\"O\")\n turns += 1\n end\n end\n \n return turns\n end",
"def turn_count\n counter = 0\n @board.each do |space|\n if space == \"X\" || space == \"O\"\n counter += 1\n end\n end\n counter\n end",
"def turn_count\n counter = 0\n @board.each do|element|\n\tif (element == \"X\") || (element == \"O\")\n \tcounter += 1\n\tend\n end\n return counter\nend",
"def turn_count\n @board.count{|token| token == \"X\" || token == \"O\"}\n end",
"def turn_count(board)\n #this counter keeps track of how many occupied spcaes there are\n counter = 0\n board.each do |turn|\n if turn == \"X\" || turn == \"O\"\n counter += 1\n end\n end\nreturn counter\nend",
"def turn_count\n count = 0\n @board.each do |space|\n if space == \"X\" || space == \"O\"\n count +=1\n end\n end\n count\n end",
"def turn_count(board)\n counter = 0\n board.each do |turns|\n if turns == \"X\" || turns == \"O\"\n counter += 1\n end\n end\n counter\nend",
"def turn_count(board)\n count=0\n i=0\n while i <= 9\n if board[i] ==\"X\" || board[i] ==\"O\"\n count+=1\n end\n i+=1\n end\n count\nend",
"def turn_count(board)\n count = 0\n board.each do |element|\n if element == \"X\" || element == \"O\"\n count += 1\n end\n end\n count\nend",
"def turn_count\n counter = 0\n @board.each do|position|\n if position == \"X\" || position == \"O\"\n counter += 1 \n end\n end\n return counter\nend",
"def turn_count(board)\n counter = 0\n board.each do |element|\n if element == \"O\" || element == \"X\"\n counter += 1\n end\n end\n return counter\nend",
"def turn_count(board)\n counter = 0\n board.each do |turn|\n if turn == \"X\" || turn == \"O\"\n counter +=1\n end\n end\n return counter\nend",
"def turn_count(board)\n turn = 0\n board.each do |character|\n if character == \"X\" || character == \"O\"\n turn += 1\n end\n end\n return turn\nend",
"def turn_count(board)\n count = 0\n board.each do |cell|\n if cell == \"X\" || cell == \"O\"\n count += 1\n end\n end\n return count\nend",
"def turn_count\n @board.count{|value| (value==\"X\" || value==\"O\")}\n #counter = 0\n #@board.each do |value|\n # if value ==\"X\" || value == \"O\"\n # counter+=1\n # end\n #end\n #counter\n end",
"def turn_count(board)\n count = 0\n board.each do |occupied|\n if occupied == \"X\" || occupied == \"O\"\n #if count is =< to 9\n count = count + 1\n end\n end\n return count\nend",
"def turn_count(board)\n counter = 0\n board.each do |board_element|\n if board_element == \"X\" || board_element == \"O\"\n counter += 1\n # puts \"Board element is: #{board_element}. Occupied elements are #{counter}\"\n end\n end\n return counter\nend",
"def turn_count(board)\n count = 0\n board.each { |element|\n element == \"X\" || element == \"O\" ? count += 1 : false\n }\n count\nend",
"def turn_count(board)\r\n counter = 0\r\n board.each do |token|\r\n if token == \"X\" || token ==\"O\"\r\n counter +=1\r\n end\r\n end\r\n counter\r\nend",
"def turn_count(board)\n counter = 0;\n board.each do |slot|\n if(slot == \"X\" || slot == \"O\")\n counter += 1;\n end\n end\n return counter;\nend",
"def turn_count(board)\n\n count = 0\n board.each do |cell|\n if cell == \"X\" || cell == \"O\"\n count += 1\n end\n end\n \n return count\nend",
"def player_turn\n # if the board has more x's then it is the computer's turn\n if @board.count(@symbol) > @board.count(@computer_symbol)\n computer_turn\n else\n users_turn\n end\n end",
"def turn_count(board)\n counter = 0\n board.each do |token|\n if token == \"X\" || token == \"O\"\n counter += 1\n end\n end\n counter\nend",
"def turn_count(board)\n count = 0\n board.each { |element|\n element == \"X\" || element == \"O\" ? count += 1 : nil\n }\n count\nend",
"def turn_count(board)\n count = 0\n board.each do |index|\n index == \"X\" || index == \"O\" ? count += 1 : nil\n end\n return count\n end",
"def current_player(board)\n#takes #turn_count and checks if even, then(?) gives value of X, else(:) it is O\n turn_count(board) % 2 == 0 ? \"X\" : \"O\"\nend",
"def turn_count(board)\r\n #returns the number of of turns that have been played\r\n counter = 0\r\n board.each do|element|\r\n if (element == \"X\") || (element == \"O\")\r\n counter += 1 \r\n end\r\n end\r\n return counter\r\nend",
"def turn_count(board)\n count_moves = 0 \n board.each do |check|\n if (check == \"X\" || check == \"O\")\n count_moves += 1 \n end\n end\n return count_moves\nend",
"def turn_count(board)\n counter = 0\n board.each do |space|\n if space == \"X\" || space == \"O\";\n counter += 1;\n end\n end\n counter\nend",
"def turn_count(board)\n count = 0;\n board.each do |square|\n if square == \"X\" || square == \"O\"\n count +=1\n end\n end\n return count #return number of turns that have been played\nend",
"def turn_count (board)\n counter = 0\n board.each do |space|\n if space == \"X\" || space == \"O\"\n counter += 1\n end\n end\n return counter\nend",
"def turn_count(board)\n counter = 0\n board.each do |space|\n if space == \"X\" || space == \"O\"\n counter += 1\n end\n end\n return counter\nend",
"def turn_count\n num_turns = 0\n @board.each do |space|\n if space == \"X\" || space == \"O\"\n num_turns += 1\n end\n end\n return num_turns\n end",
"def turn_count(board)\n count = 0\n board.each do |entry|\n if (entry == \"X\" || entry == \"O\")\n count += 1\n end\n end\n count\nend",
"def turn_count(board)\n play_count = 0\n board.each do |play|\n if play == \"X\" || play == \"O\"\n play_count += 1\n end\n end\n play_count\nend",
"def turn_count(board)\n spaces_taken = 0\n board.each do |board_space|\n if board_space == \"X\" || board_space == \"O\"\n spaces_taken += 1 \n end\n end\n return spaces_taken\nend",
"def turn_count(board)\n count = 0\n board.each do |space|\n if (space == 'X' || space == 'O')\n count += 1\n end\n end\n return count\nend",
"def current_player(board)\n turn_count(board)\n if turn_count(board).even?\n \"X\"\n else \n \"O\"\n end\nend",
"def turn_count\n turns = @board.count { |i| i == 'X' || i == 'O' }\n turns\nend",
"def turn_count(board)\n count= 0\n board.each do |token|\n if token == \"X\" || token == \"O\"\n count +=1\n end\n end\nreturn count\nend",
"def current_player(board)\nturn_count(board).even? ? \"X\":\"O\"\nend",
"def move(board)\n if !board.taken?(\"5\")\n board = \"5\"\n elsif board.turn_count == 1\n board = \"1\"\n elsif board.turn_count == 2\n board = [\"3\", \"7\", \"9\"].detect do |position|\n !board.taken?(position)\n end\n elsif board.turn_count > 2 && board.turn_count < 6\n win_combo_array = []\n Game::WIN_COMBINATIONS.select do |win_combo|\n if ((board.position((win_combo[0] + 1).to_s) == opponent_token) && (board.position((win_combo[1] + 1).to_s) == opponent_token)) ||\n ((board.position((win_combo[1] + 1).to_s) == opponent_token) && (board.position((win_combo[2] + 1).to_s) == opponent_token)) ||\n ((board.position((win_combo[2] + 1).to_s) == opponent_token) && (board.position((win_combo[0] + 1).to_s) == opponent_token)) == true\n win_combo_array << win_combo\n end\n end\n number_at_index = win_combo_array[0].select do |number|\n position_number = (number + 1).to_s\n if board.position(position_number) != opponent_token\n position_number\n end\n end\n board = (number_at_index[0] + 1).to_s\n else\n win_combo_array = []\n Game::WIN_COMBINATIONS.select do |win_combo|\n if ((board.position((win_combo[0] + 1).to_s) == token) && (board.position((win_combo[1] + 1).to_s) == token)) ||\n ((board.position((win_combo[1] + 1).to_s) == token) && (board.position((win_combo[2] + 1).to_s) == token)) ||\n ((board.position((win_combo[2] + 1).to_s) == token) && (board.position((win_combo[0] + 1).to_s) == token)) == true\n win_combo_array << win_combo\n end\n end\n number_at_index = win_combo_array[0].select do |number|\n position_number = (number + 1).to_s\n if board.position(position_number) != opponent_token\n position_number\n end\n end\n board_n = (number_at_index[0] + 1).to_s\n board = final_moves(board, board_n)\n end\n end",
"def turn_count(board)\n turns = 0\n board.each do | space |\n if space == \"X\" || space == \"O\"\n turns += 1\n end\n end\n return turns\nend",
"def turn_count(board)\n board.count{|element| element == \"X\" || element == \"O\"}\nend",
"def turn_count(board)\n counter = 0\n board.each do |q| \n if q != \" \"\n counter += 1\n #if q == \"X\" \n #count_x = count_x + 1 \n#elsif q == \"O\"\n #count_o = count_o + 1 \nend\nend\n#return count_x\n#return count_o\nreturn counter\nend",
"def turn_count\n count = 0\n @board.each do |token|\n if token.downcase == \"x\" || token.downcase == \"o\"\n count += 1\n end\n end\n count\nend",
"def turn_count(board)\n x_counter = 0\n o_counter = 0\n board.each do |spot|\n\n if spot == \"X\"\n x_counter += 1\n\n elsif spot == \"O\"\n o_counter += 1\n end\n end\n\nreturn x_counter + o_counter\nend",
"def turn_count\n count = 0\n @board.each do |player|\n if player == 'X' || player == 'O'\n count += 1\n end\n end\n return count\n end",
"def turn_count(board)\n counter = 0\n board.each do |turn|\n if turn == \"X\" || turn == \"O\"\n counter += 1\n puts \"#{counter}\"\n end\n end\n counter\nend",
"def turn\n marker = @turn_count.even? ? marker = \"X\" : marker = \"O\"\n move\n print \"Current board: \"\n show_board\n @player +=1\n end",
"def current_player(board)\n if turn_count(board) % 2 == 0\n return \"X\" #even turn count == X\n end \n return \"O\" #odd turn count == O \nend",
"def turn_count(board)\n board.count {|token| token == \"X\" || token == \"O\"}\n end",
"def turn_count(board)\n counter = 0\n board.each do | move |\n if move == \"X\" || move == \"O\"\n puts counter += 1\n end\n end\n counter #This should be the number of turns\nend",
"def turn_count(board)\n board.count{|token| token == \"X\" || token == \"O\"}\nend",
"def current_player(board)\nturn_count(board).even? ? \"X\" : \"O\"\nend",
"def turn_count\n x_turns = @board.count(\"X\")\n o_turns = @board.count(\"O\")\n turn_count = x_turns + o_turns\n return turn_count\nend",
"def turn_count(board)\n counter = 0\n board.each do |i|\n if i == \"X\" || i == \"O\"\n counter += 1\n end\n end\n puts counter\n return counter\nend",
"def turn_count #\n@board.count{|token| token == \"X\" || token == \"O\"}\nend",
"def current_player(board)\nif turn_count(board) % 2 == 0\n \"X\"\nelsif turn_count(board) % 2 == 1\n \"O\"\n end\nend",
"def play(board)\nnum_of_turns = 0\n until num_of_turns == 9 || over?(board) == true\n turn(board)\n num_of_turns += 1\n end\n if over?(board) == true && won?(board) == \"X\"\n puts \"Congratulations X!\"\n elsif\n over?(board) == true && won?(board) == \"O\"\n puts \"Congratulations O!\"\n elsif\n over?(board) == true && won?(board) != true\n puts \"Cats Game!\"\n else\n\n end\nend",
"def turn_count(board)\n turn_counter = 0\n (board.length).times do |i|\n if position_taken?(board, i)\n turn_counter += 1\n end\n end\n\n return turn_counter\nend",
"def turn_count(board)\n counter = 0 #counter is set to 0 before the for each loop interation begins\n board.each do |turn_count| # the variable turn_count can be called anything, this is a new local variable\n #puts \"Loop number #{counter} gets tuuurn: #{turn_count}\"\n if (turn_count == \"X\") || (turn_count == \"O\") # this if statement is within the loop iteration\n #puts \"Loop number #{counter} gets turn: #{turn_count}\"\n #puts turn_count # i'm outputing the turn_count to see it, but the 'return counter' below is what passes the test\n counter += 1 #adding 1 to the counter for each \"X\" and \"O\" in the board array\n end #end if statement\n end #end for...each loop iteration\n return counter # the counter holds the number of turns, and is what passes the test\nend",
"def current_player(board)\n turns = turn_count(board)\n if turns.odd?\n return \"O\"\n else\n return \"X\"\n end\nend",
"def current_player(board)\n count = turn_count(board)\n if count.even?\n return \"X\"\n else\n return \"O\"\n end\nend",
"def current_player(board)\n count = turn_count(board)\n count.even? ? \"X\" : \"O\"\nend",
"def turn_count(board)\n counter = 0\n board.each {|char|\n if char == \"O\" || char == \"X\"\n counter += 1\n end\n }\n return counter\nend",
"def turn_count(board)\n board.count { |chr| chr ==\"X\" || chr == \"O\"}\nend",
"def current_player(board)\n if turn_count(board).odd?\n \"O\"\n else\n \"X\"\n end\nend",
"def current_player(board)\n \n num_turns = turn_count(board)\n \n if num_turns % 2 == 0\n return \"X\"\n else\n return \"O\"\n end\n\nend",
"def turn_count\n count = 0\n cells.each {|pos| count += 1 if pos == \"X\" || pos == \"O\"}\n count\n end",
"def turn_count(board)\n x = board.select {|i| i == \"O\" || i == \"X\"}\n count = x.size\nend"
] | [
"0.8072727",
"0.7915733",
"0.79138714",
"0.7856606",
"0.78400487",
"0.78354377",
"0.78354377",
"0.78343827",
"0.78080595",
"0.78058624",
"0.77954924",
"0.77886987",
"0.7786069",
"0.77824855",
"0.7779163",
"0.7755597",
"0.77447635",
"0.77155644",
"0.771409",
"0.77134347",
"0.77088696",
"0.77010506",
"0.76990944",
"0.7697625",
"0.76976",
"0.76965195",
"0.7693264",
"0.7689981",
"0.76813257",
"0.7669946",
"0.76592076",
"0.7656959",
"0.76495796",
"0.7648347",
"0.76438427",
"0.7642897",
"0.76219445",
"0.762056",
"0.76204306",
"0.7619293",
"0.76041436",
"0.7602204",
"0.7600685",
"0.7593948",
"0.7590835",
"0.7590574",
"0.7586457",
"0.7585926",
"0.75848085",
"0.75843406",
"0.75819975",
"0.75789183",
"0.7568699",
"0.7565774",
"0.7563787",
"0.7559414",
"0.75542384",
"0.7551793",
"0.7548203",
"0.7547154",
"0.75384074",
"0.7532452",
"0.75314415",
"0.75216985",
"0.7521073",
"0.75047606",
"0.7497447",
"0.7497346",
"0.7492332",
"0.74825186",
"0.74777627",
"0.7475502",
"0.746092",
"0.7458241",
"0.7451198",
"0.74359447",
"0.74340016",
"0.74066895",
"0.7396255",
"0.738494",
"0.7381386",
"0.736601",
"0.7353969",
"0.7343822",
"0.73425364",
"0.73171186",
"0.73127556",
"0.7307325",
"0.7300379",
"0.72758335",
"0.72710586",
"0.7270739",
"0.7264832",
"0.72567725",
"0.7253274",
"0.7246584",
"0.72428185",
"0.72393024",
"0.72272694",
"0.7216098",
"0.7190407"
] | 0.0 | -1 |
GET /host_states GET /host_states.json | def index
@host_states = HostState.all
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_states\n perform(:get, 'enum/states', nil, nonauth_headers).body\n end",
"def device_states_list\n get \"deviceStates\"\n end",
"def index\n @api_states = Api::State.all\n end",
"def index\n @project_states = ProjectState.all\n json_response(@project_states)\n end",
"def get_state\n @states = State.find_state(params[:id])\n respond_to do |format|\n format.json { render :json => @states }\n end\n end",
"def index\n @states = State.all\n \n render json: @states\n end",
"def index\n @states = State.all\n respond_with(@states)\n end",
"def index\n @states = State.all\n end",
"def index\n @states = State.all\n end",
"def index\n @states = State.all\n end",
"def index\n @states = State.all\n end",
"def index\n @states = State.all\n end",
"def _region_states(region_id)\n get('region/states', region_id)\n end",
"def device_states_get(device_name)\n get \"deviceStates/#{device_name}\"\n end",
"def states; @_hegemon_states.keys; end",
"def states\n [\n ['0', 'HOSTS_LOADED', 'Hosts table has been populated with host data']\n ]\n end",
"def index\n @program_states = ProgramState.all\n end",
"def index\n @event_states = EventState.all\n end",
"def index\n @action_states = ActionState.all\n end",
"def get_state\n @client.get_state\n end",
"def index\n @user_states = UserState.all\n end",
"def index\n @user_states = UserState.all\n end",
"def set_host_state\n @host_state = HostState.find(params[:id])\n end",
"def show\n json_response(@project_state)\n end",
"def show\n render json: @state\n end",
"def index\n @page_title = 'States'\n @states = State.all\n end",
"def index\n @task_states = TaskState.all\n end",
"def states\n @attributes[\"data\"][\"states\"]\n end",
"def list_alert_state(project_name, optional={})\n\t\targs = self.class.new_params\n\t\targs[:method] = 'GET'\n\t\targs[:path]['ProjectName'] = project_name\n\t\targs[:pattern] = '/projects/[ProjectName]/alerts_state'\n\t\targs[:query]['Action'] = 'ListAlertState'\n\t\targs[:region] = optional[:_region] if (optional.key? :_region)\n\t\targs[:scheme] = 'http'\n\t\tif optional.key? :alert_name\n\t\t\targs[:query]['AlertName'] = optional[:alert_name]\n\t\tend\n\t\tif optional.key? :dimensions\n\t\t\targs[:query]['Dimensions'] = optional[:dimensions]\n\t\tend\n\t\tif optional.key? :end_time\n\t\t\targs[:query]['EndTime'] = optional[:end_time]\n\t\tend\n\t\tif optional.key? :page\n\t\t\targs[:query]['Page'] = optional[:page]\n\t\tend\n\t\tif optional.key? :page_size\n\t\t\targs[:query]['PageSize'] = optional[:page_size]\n\t\tend\n\t\tif optional.key? :start_time\n\t\t\targs[:query]['StartTime'] = optional[:start_time]\n\t\tend\n\t\tself.run(args)\n\tend",
"def create\n @host_state = HostState.new(host_state_params)\n\n respond_to do |format|\n if @host_state.save\n format.html { redirect_to @host_state, notice: 'Host state was successfully created.' }\n format.json { render :show, status: :created, location: @host_state }\n else\n format.html { render :new }\n format.json { render json: @host_state.errors, status: :unprocessable_entity }\n end\n end\n end",
"def states\r\n @states.collect {|id| $data_states[id] }\r\n end",
"def index\n @states = State.order(\"name\").page(params[:page]).per(50)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @states }\n end\n end",
"def get_services_state()\n file = File.read('config/status.json')\n data_hash = JSON.parse(file)\n return data_hash\n end",
"def index\n @asset_states = AssetState.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @asset_states }\n end\n end",
"def get_agent_online_state\n reply = @client.call(:get_agent_online_state)\n data = reply.body.dig(:get_agent_online_state_response,\n :get_agent_online_state_result,\n :array_of_string)\n data = check_if_data_exists(data)\n\n data.map do |attrs|\n {\n agent_id: attrs[:string][0],\n name: attrs[:string][1],\n team: attrs[:string][2],\n # Some states are randomly capitalized and include <> brackets, the brackets are trimmed out\n # and each individual word in the state is capitalized.\n # Unicode characters require a workaround using mb_chars.\n status: normalize_unicode_string(attrs[:string][3]),\n time_in_status: attrs[:string][4]\n }\n end\n rescue Savon::HTTPError => error\n Rails.logger.debug error.http.code\n return []\n end",
"def get_state\n send_request(FUNCTION_GET_STATE, [], '', 2, '? ?')\n end",
"def states\n @states ||= {}\n end",
"def index\n @states = @country.states.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @states }\n end\n end",
"def us_states\n response = get('AllUSStates')\n StoreStatesSerializer.new([]).from_xml(response) #.map(&:state)\n end",
"def states\n []\n end",
"def states\n []\n end",
"def states\n @finity.states.map { |name, _| name }\n end",
"def index\n @process_states = ProcessState.all\n end",
"def show\n @state = State.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @state }\n end\n end",
"def show\n @state = State.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @state }\n end\n end",
"def index\n @sprint_states = SprintState.all\n end",
"def state_events(state)\n api_return = RestClient.get('https://app.ticketmaster.com/discovery/v2/events.json?stateCode=' + state + '&apikey=' + $ticket_master_api_key)\n JSON.parse(api_return)\nend",
"def show\n @map_state = MapState.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @map_state }\n end\n end",
"def all_state(id)\n $app_sessions[id][:state]\nend",
"def show\n \n @states = State.find(:all)\n @state = State.find(params[:id], :include => [ { :offices => :office_type }, {:offices => :incumbents }])\n @us_senator_offices = []\n @us_rep_offices = []\n @state_senator_offices = []\n @state_rep_offices = []\n @state.offices.each do |o|\n case o.office_type.ukey\n when 'US_SENATOR'\n @us_senator_offices.push(o)\n when 'US_REP'\n @us_rep_offices.push(o)\n when 'HOUSE_DELEGATE'\n @us_rep_offices.push(o)\n when 'STATE_SENATOR'\n @state_senator_offices.push(o)\n when 'STATE_REP'\n @state_rep_offices.push(o)\n end\n end\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @state }\n end\n end",
"def update\n respond_to do |format|\n if @host_state.update(host_state_params)\n format.html { redirect_to @host_state, notice: 'Host state was successfully updated.' }\n format.json { render :show, status: :ok, location: @host_state }\n else\n format.html { render :edit }\n format.json { render json: @host_state.errors, status: :unprocessable_entity }\n end\n end\n end",
"def get_state\n@state.keys\nend",
"def get_state\n \"Treasury state: #{@resources}\"\n end",
"def backup_dashboards(http_auth, graphite_server)\n board_names = JSON.parse(http_auth.post(\"#{graphite_server}/dashboard/find/\", form: {query:''}).to_s)['dashboards'].map{|n|n['name']}\n\n board_names.map do |b|\n dat=http_auth.get(\"#{graphite_server}/dashboard/load/#{b}\").to_s\n state_str=JSON.parse(dat)['state'].to_json # :(\n puts state_str\n end\nend",
"def by_state\n \tdata = City.where('state_id = ?', params[:state_id]).order(:name)\n \trespond_to do |format|\n \t\tformat.json {render :json => data, :status => 200}\n \tend\n end",
"def host_state_params\n params.require(:host_state).permit(:name)\n end",
"def index\n @modelstates = Modelstate.all\n end",
"def known_states; end",
"def known_states; end",
"def known_states; end",
"def read_state(hyp, machine)\n output = hyp.exec(\"vmadm get #{machine.id}\")\n\n if output.exit_code != 0 || output.stderr.chomp =~ /No such zone configured/ || output.stdout == \"\"\n nil\n else\n JSON.load(output.stdout)\n end\n end",
"def query_contact_states(options = nil)\n require_relative 'contactservicestate'\n @api.cursor(ContactServiceState, get_base_api_path() + \"/states\", options)\n end",
"def destroy\n @host_state.destroy\n respond_to do |format|\n format.html { redirect_to host_states_url, notice: 'Host state was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def index_hosts\n load_service\n return if (@service.blank?)\n\n # Preload hosts\n @hosts = Host.where(:_id.in => @service.host_ids)\n\n respond_to do |format|\n format.html\n end\n end",
"def display_all_waiaria_states\n # Interface method\n end",
"def fetch_node_manager_states\n states = {}\n @zk.children(current_state_root).each do |child|\n full_path = \"#{current_state_root}/#{child}\"\n begin\n states[child] = symbolize_keys(decode(@zk.get(full_path).first))\n rescue ZK::Exceptions::NoNode\n # ignore, this is an edge case that can happen when a node manager\n # process dies while fetching its state\n rescue => ex\n logger.error(\"Failed to fetch states for #{full_path}: #{ex.inspect}\")\n end\n end\n states\n end",
"def select_state state\r\n select state, :from => \"client_state_phys\"\r\n end",
"def show\n @host = Host.find(params[:id])\n\n render json: @host\n end",
"def connection_states\n hash = {}\n names = STATENAMES.reverse\n \n STATECODES.sort.each_with_index do |code, index|\n hash[names[index]] = code\n end\n \n return hash\n end",
"def state\r\n\t\t\t`#{BITS::BITSADMIN} /getstate {#{@id}}`\r\n\t\tend",
"def show\n @state = State.find(params[:id])\n end",
"def states(refresh=false)\n @states ||= []\n assert_states if refresh\n @states\n end",
"def state\n status[\"state\"]\n end",
"def state_list\n @state_list=State.find_all_by_country_id(params[:city][:value])\n end",
"def state\n address.state\n end",
"def [](query)\n query(query).states || []\n end",
"def state_str\n HOST_STATES[state]\n end",
"def state_list(states)\n ul(states.map { |s| state_short s })\n end",
"def index\n @state_types = StateType.all\n end",
"def index\n @states = State.sorted # sorted defined in model, creating instance variable @states\n end",
"def states; end",
"def search\n @states = State::ALL_STATES\n end",
"def pull_states\n render template: 'responses/pull_states.xml.builder', layout: false\n end",
"def index\n @tutorial_states = Tutorial::State.all\n\n respond_to do |format|\n format.html do\n @tutorial_states = Tutorial::State.paginate(:page => params[:page], :per_page => 50) \n @paginate = true \n end\n format.json { render json: @tutorial_states }\n end\n end",
"def index\n @host_addresses = HostAddress.all\n end",
"def index\n @federal_states = FederalState.all\n end",
"def available_states\n states = []\n states << :passive if passive?\n states << :pending if passive? || pending?\n states << :active\n states << :suspended unless deleted?\n states << :deleted\n states\n end",
"def index\n @locations = Location.find(:all, :order => 'state, name ASC')\n @states = []\n @locations.each do |loc|\n @states << loc.state unless loc.state.nil?\n end\n @states.uniq!\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @locations }\n end\n end",
"def runstate(host, port, opts = {})\n cmd = opts[:cmd]\n if [:pause, :wait, :resume].include?(cmd.to_sym)\n path = \"/runstate/#{cmd.to_s}\"\n else\n path = \"/runstate/query\"\n end\n\n uri = URI::HTTP.build(host: host, port: port, path: path)\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n response = http.get(uri.path, \"Accept\" => \"application/json\")\n end",
"def show\n @state = @country.states.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @state }\n end\n end",
"def index\n @united_states = UnitedState.all\n end",
"def state_machines\n @state_machines ||= begin\n if state_machines = object['state_machines']\n state_machines.each do |_name, machine|\n serializer.serialize(state_machine_image_path(machine), machine[:image]) if machine[:image]\n end\n end\n end\nend",
"def show\n @host = Host.find_by(hostname: params[:id])\n\n render json: @host\n end",
"def determine_states(query, exact_match = false)\n states_map = FinalAPI::V1::Http::DDTF_Build::BUILD_STATE2API_V1STATUS\n states_map.reject { |k,v| k == '' }.each_with_object([]) do |(travis_state, ddtf_state), out|\n if exact_match\n out << travis_state if ddtf_state.downcase == query.downcase\n else\n out << travis_state if ddtf_state.downcase.include? query.downcase\n end\n end.compact\n end",
"def state(params = {})\n response = client.get \"/_cluster/state{/metrics}{/indices}\", params.merge(action: \"cluster.state\", rest_api: \"cluster.state\")\n response.body\n end",
"def state\n domain_info[:state]\n end",
"def state\n domain_info[:state]\n end",
"def index\n @state_statistics = StateStatistic.all\n end",
"def cities_in_state\n cities = State.find(params[:id]).cities.order(name: :asc)\n\n render json: cities.to_json(), status: :ok\n end",
"def failed_states\n old_states = @states\n @states = {}\n\n new_states = Admiral.etcd.get(STATE_KEY).children\n new_states.each do |service_state|\n service_key = service_state.key.split('/').last\n service_data = JSON.parse(service_state.value)\n was_ok = old_states[service_key]['subState'] != 'failed' rescue true\n is_failed = service_data['subState'] == 'failed'\n\n yield service_key, service_data if was_ok && is_failed\n\n @states[service_key] = service_data \n end\n end"
] | [
"0.730695",
"0.70312136",
"0.7012061",
"0.6694319",
"0.6684666",
"0.6555589",
"0.6412258",
"0.63435274",
"0.63435274",
"0.63435274",
"0.63435274",
"0.63435274",
"0.6295455",
"0.6283629",
"0.6279372",
"0.62609327",
"0.61760485",
"0.6172597",
"0.6142149",
"0.61174744",
"0.61124873",
"0.61124873",
"0.6110715",
"0.61059034",
"0.61027414",
"0.6061533",
"0.6060778",
"0.60550165",
"0.6041428",
"0.603983",
"0.6038117",
"0.6028153",
"0.6013957",
"0.5942791",
"0.5934835",
"0.59246",
"0.5924405",
"0.5920413",
"0.59125507",
"0.5894923",
"0.5894923",
"0.5874388",
"0.5866262",
"0.58572847",
"0.58572847",
"0.58560616",
"0.5852477",
"0.5848208",
"0.5839352",
"0.5820922",
"0.5819521",
"0.5727963",
"0.5693632",
"0.56814826",
"0.5666814",
"0.5665396",
"0.5644958",
"0.56018096",
"0.56018096",
"0.56018096",
"0.55964977",
"0.5589818",
"0.5581591",
"0.5554606",
"0.5541521",
"0.55352443",
"0.55343574",
"0.55302995",
"0.5523045",
"0.55152446",
"0.55128694",
"0.55046165",
"0.549464",
"0.5492154",
"0.54878",
"0.54778665",
"0.54723376",
"0.5456316",
"0.54523283",
"0.54510975",
"0.54499197",
"0.54498416",
"0.54274976",
"0.54267627",
"0.5422311",
"0.54109764",
"0.54086554",
"0.5405252",
"0.5403643",
"0.5399939",
"0.5398551",
"0.5391653",
"0.5390602",
"0.5387971",
"0.5379994",
"0.5371655",
"0.5371655",
"0.5358931",
"0.53493077",
"0.5345617"
] | 0.7808806 | 0 |