code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from EEETools.MainModules import Block
import xml.etree.ElementTree as ETree
from EEETools import costants
class Separator(Block):
    """Separator block: splits one inlet fluid stream into several outlets.

    Fluid type and pressure are preserved across the block; the flow rate is
    divided between the outlet streams and is therefore not preserved.
    """

    def __init__(self, inputID, main_class):
        Block.__init__(self, inputID, main_class)
        self.type = "separator"

    def is_ready_for_calculation(self):
        """Return True once at least one inlet and one outlet are connected."""
        return len(self.input_connections) >= 1 and len(self.output_connections) >= 1

    def initialize_connection_list(self, input_list):
        """Connect the block from a list of signed connection indices.

        Positive indices are inputs, negative indices are outputs; the sign
        is stripped before looking the connection up.
        """
        for elem in input_list:
            new_conn = self.main_class.find_connection_by_index(abs(elem))
            if new_conn is not None:
                is_input = (elem > 0)
                self.add_connection(new_conn, is_input)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize the external fluid connections to an XML element."""
        xml_connection_list = ETree.Element("Connections")
        fluid_connections = ETree.SubElement(xml_connection_list, "FluidConnections")
        for input_connection in self.external_input_connections:
            input_xml = ETree.SubElement(fluid_connections, "input")
            input_xml.set("index", str(input_connection.index))
        for output_connection in self.external_output_connections:
            output_xml = ETree.SubElement(fluid_connections, "output")
            output_xml.set("index", str(output_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore the fluid connections from an XML element."""
        fluid_connections = input_list.find("FluidConnections")
        self.__add_connection_by_index(fluid_connections, "input")
        self.__add_connection_by_index(fluid_connections, "output")

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # "input" tags are inlet streams; any other tag name is treated as an outlet.
        is_input = (connection_name == "input")
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    @classmethod
    def return_EES_needed_index(cls):
        """Map EES variable names to [index, repeatable] pairs."""
        return_dict = {"flow input": [1, False],
                       "flow output": [2, True]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        """Return the pressure-continuity base equation for the EES export."""
        return_element = dict()
        variables_list = [{"variable": "flow input", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "flow output", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the streams sharing the given zone type with "input_connection"."""
        if zone_type == costants.ZONE_TYPE_FLOW_RATE:
            # In a separator flow rate is not preserved, hence an empty list is returned
            return list()
        elif zone_type in (costants.ZONE_TYPE_FLUID, costants.ZONE_TYPE_PRESSURE):
            # Fluid type and pressure are preserved, hence if "input_connection"
            # is connected to the block every fluid stream connected to it is returned
            if self.connection_is_in_connections_list(input_connection):
                return self.get_fluid_stream_connections()
            else:
                return list()
        else:
            return list()
from EEETools.MainModules import Block
from EEETools.MainModules.support_blocks import Drawer
import xml.etree.ElementTree as ETree
from EEETools import costants
class Compressor(Block):
    """Compressor block: a mechanical power input drives the fluid stream
    from its inlet to its outlet state.

    Fluid streams are attached to a single support Drawer; the mechanical
    power stream is attached directly to the block itself.
    """

    def __init__(self, inputID, main_class):
        super().__init__(inputID, main_class)
        self.type = "compressor"
        self.has_support_block = True
        self.support_block.append(Drawer(main_class, self, is_input=False, allow_multiple_input=False))

    def add_connection_to_support_block(self, new_connection, is_input):
        # Fluid streams live on the drawer, not on the block itself.
        self.support_block[0].add_connection(new_connection, is_input)

    def is_ready_for_calculation(self):
        """Return True when enough connections are present to solve the block."""
        drawer = self.support_block[0]
        return (len(self.output_connections) >= 1
                and len(drawer.output_connections) >= 1
                and len(drawer.input_connections) >= 1)

    def initialize_connection_list(self, input_list):
        """Connect the block from a raw index list: [power, flow in, flow out]."""
        finder = self.main_class.find_connection_by_index
        power_conn = finder(abs(input_list[0]))
        inlet_conn = finder(abs(input_list[1]))
        outlet_conn = finder(abs(input_list[2]))
        # The power stream is mechanical, hence not a fluid stream.
        power_conn.is_fluid_stream = False
        self.add_connection(power_conn, is_input=True)
        self.add_connection(inlet_conn, is_input=True, append_to_support_block=0)
        self.add_connection(outlet_conn, is_input=False, append_to_support_block=0)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize fluid (drawer) and mechanical (block) connections to XML."""
        root = ETree.Element("Connections")
        fluid = ETree.SubElement(root, "FluidConnections")
        drawer = self.support_block[0]
        for conn in drawer.external_input_connections:
            ETree.SubElement(fluid, "input").set("index", str(conn.index))
        for conn in drawer.external_output_connections:
            ETree.SubElement(fluid, "output").set("index", str(conn.index))
        mechanical = ETree.SubElement(root, "MechanicalConnections")
        for conn in self.external_input_connections:
            ETree.SubElement(mechanical, "input").set("index", str(conn.index))
        return root

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore fluid and mechanical connections from an XML element."""
        fluid = input_list.find("FluidConnections")
        mechanical = input_list.find("MechanicalConnections")
        self.__add_connection_by_index(fluid, "input", append_to_support_block=0)
        self.__add_connection_by_index(fluid, "output", append_to_support_block=0)
        self.__add_connection_by_index(mechanical, "input")

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # "input" tags are inlet streams; any other tag name is treated as an outlet.
        is_input = (connection_name == "input")
        for element in input_list.findall(connection_name):
            found = self.main_class.find_connection_by_index(float(element.get("index")))
            if found is not None:
                self.add_connection(found, is_input, append_to_support_block=append_to_support_block)

    @classmethod
    def return_EES_needed_index(cls):
        """Map EES variable names to [index, repeatable] pairs."""
        return {"power input": [0, False],
                "flow input": [1, False],
                "flow output": [2, False]}

    @classmethod
    def return_EES_base_equations(cls):
        """Return the mass-continuity base equation for the EES export."""
        mass_vars = [{"variable": "flow input", "type": costants.ZONE_TYPE_FLOW_RATE},
                     {"variable": "flow output", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return {"mass_continuity": {"variables": mass_vars, "related_option": "none"}}

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the streams sharing the given zone type with "input_connection"."""
        drawer = self.support_block[0]
        if zone_type in (costants.ZONE_TYPE_FLOW_RATE, costants.ZONE_TYPE_FLUID):
            # Flow rate and fluid type are preserved across the compressor: if
            # the stream belongs to the drawer, every fluid stream connected
            # to the drawer shares its zone.
            if drawer.connection_is_in_connections_list(input_connection):
                return drawer.get_fluid_stream_connections()
            return list()
        # Pressure is not preserved in a compressor; unknown zone types also
        # get an empty list.
        return list()
from EEETools.MainModules import Block
from EEETools.MainModules.support_blocks import Drawer
import xml.etree.ElementTree as ETree
from EEETools import costants
class Pump(Block):
    """Pump block: a mechanical power input raises the pressure of a fluid stream.

    Fluid streams are attached to a single support Drawer; the mechanical
    power stream is attached directly to the block itself.
    """

    def __init__(self, inputID, main_class):
        super().__init__(inputID, main_class)
        self.type = "pump"
        self.has_support_block = True
        self.support_block.append(Drawer(main_class, self, is_input=False, allow_multiple_input=False))

    def add_connection_to_support_block(self, new_connection, is_input):
        # Fluid streams live on the drawer, not on the block itself.
        self.support_block[0].add_connection(new_connection, is_input)

    def is_ready_for_calculation(self):
        """Return True when enough connections are present to solve the block."""
        return (len(self.output_connections) >= 1
                and len(self.support_block[0].output_connections) >= 1
                and len(self.support_block[0].input_connections) >= 1)

    def initialize_connection_list(self, input_list):
        """Connect the block from a raw index list: [power, flow in, flow out]."""
        new_conn_power = self.main_class.find_connection_by_index(abs(input_list[0]))
        new_conn_input_flow = self.main_class.find_connection_by_index(abs(input_list[1]))
        new_conn_output_flow = self.main_class.find_connection_by_index(abs(input_list[2]))
        if new_conn_power is not None:
            # Consistency fix: mark the power stream as non-fluid, as the
            # analogous Compressor and Expander blocks do for their power
            # connection.
            new_conn_power.is_fluid_stream = False
        self.add_connection(new_conn_power, is_input=True)
        self.add_connection(new_conn_input_flow, is_input=True, append_to_support_block=0)
        self.add_connection(new_conn_output_flow, is_input=False, append_to_support_block=0)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize fluid (drawer) and mechanical (block) connections to XML."""
        xml_connection_list = ETree.Element("Connections")
        fluid_connections = ETree.SubElement(xml_connection_list, "FluidConnections")
        for input_connection in self.support_block[0].external_input_connections:
            input_xml = ETree.SubElement(fluid_connections, "input")
            input_xml.set("index", str(input_connection.index))
        for output_connection in self.support_block[0].external_output_connections:
            output_xml = ETree.SubElement(fluid_connections, "output")
            output_xml.set("index", str(output_connection.index))
        mechanical_connections = ETree.SubElement(xml_connection_list, "MechanicalConnections")
        for input_connection in self.external_input_connections:
            input_xml = ETree.SubElement(mechanical_connections, "input")
            input_xml.set("index", str(input_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore fluid and mechanical connections from an XML element."""
        fluid_connections = input_list.find("FluidConnections")
        mechanical_connections = input_list.find("MechanicalConnections")
        self.__add_connection_by_index(fluid_connections, "input", append_to_support_block=0)
        self.__add_connection_by_index(fluid_connections, "output", append_to_support_block=0)
        self.__add_connection_by_index(mechanical_connections, "input")

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # "input" tags are inlet streams; any other tag name is treated as an outlet.
        is_input = (connection_name == "input")
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    @classmethod
    def return_EES_needed_index(cls):
        """Map EES variable names to [index, repeatable] pairs."""
        return_dict = {"power input": [0, False],
                       "flow input": [1, False],
                       "flow output": [2, False]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        """Return the mass-continuity base equation for the EES export."""
        return_element = dict()
        variables_list = [{"variable": "flow input", "type": costants.ZONE_TYPE_FLOW_RATE},
                          {"variable": "flow output", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return_element.update({"mass_continuity": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the streams sharing the given zone type with "input_connection"."""
        if zone_type in (costants.ZONE_TYPE_FLOW_RATE, costants.ZONE_TYPE_FLUID):
            # Flow rate and fluid type are preserved across the pump: if the
            # stream belongs to the drawer, every fluid stream connected to
            # the drawer shares its zone.
            if self.support_block[0].connection_is_in_connections_list(input_connection):
                return self.support_block[0].get_fluid_stream_connections()
            else:
                return list()
        else:
            # Pressure is not preserved in a pump; unknown zone types also
            # get an empty list.
            return list()
from EEETools.MainModules import Block
import xml.etree.ElementTree as ETree
from EEETools import costants
class Mixer(Block):
    """Mixer block: merges several inlet fluid streams into one outlet.

    Fluid type and pressure are preserved across the block; the flow rate of
    each single stream is not.
    """

    def __init__(self, inputID, main_class):
        Block.__init__(self, inputID, main_class)
        self.type = "mixer"

    def is_ready_for_calculation(self):
        """Return True once at least one inlet and one outlet are connected."""
        return len(self.input_connections) >= 1 and len(self.output_connections) >= 1

    def initialize_connection_list(self, input_list):
        """Connect the block from a list of signed connection indices.

        Positive indices are inputs, negative indices are outputs; the sign
        is stripped before looking the connection up.
        """
        for elem in input_list:
            new_conn = self.main_class.find_connection_by_index(abs(elem))
            if new_conn is not None:
                is_input = (elem > 0)
                self.add_connection(new_conn, is_input)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize the external fluid connections to an XML element."""
        xml_connection_list = ETree.Element("Connections")
        fluid_connections = ETree.SubElement(xml_connection_list, "FluidConnections")
        for input_connection in self.external_input_connections:
            input_xml = ETree.SubElement(fluid_connections, "input")
            input_xml.set("index", str(input_connection.index))
        for output_connection in self.external_output_connections:
            output_xml = ETree.SubElement(fluid_connections, "output")
            output_xml.set("index", str(output_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore the fluid connections from an XML element."""
        fluid_connections = input_list.find("FluidConnections")
        self.__add_connection_by_index(fluid_connections, "input")
        self.__add_connection_by_index(fluid_connections, "output")

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # "input" tags are inlet streams; any other tag name is treated as an outlet.
        is_input = (connection_name == "input")
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    @classmethod
    def return_EES_needed_index(cls):
        """Map EES variable names to [index, repeatable] pairs."""
        return_dict = {"flow input": [1, True],
                       "flow output": [2, False]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        """Return the pressure-continuity base equation for the EES export."""
        return_element = dict()
        variables_list = [{"variable": "flow input", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "flow output", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the streams sharing the given zone type with "input_connection"."""
        if zone_type == costants.ZONE_TYPE_FLOW_RATE:
            # In a mixer the flow rate is not preserved, hence an empty list is returned
            return list()
        elif zone_type in (costants.ZONE_TYPE_FLUID, costants.ZONE_TYPE_PRESSURE):
            # Fluid type and pressure are preserved, hence if "input_connection"
            # is connected to the block every fluid stream connected to it is returned
            if self.connection_is_in_connections_list(input_connection):
                return self.get_fluid_stream_connections()
            else:
                return list()
        else:
            return list()
from EEETools.MainModules import Block
from EEETools.MainModules.support_blocks import Drawer
import xml.etree.ElementTree as ETree
from EEETools import costants
# EXACTLY THE SAME AS HEAT_EXCHANGER (Maintained only to keep the analogy with Matlab application)
class Evaporator(Block):
    """Evaporator block, handled through a list of support "Drawer" blocks.

    Fuel (input) streams and product (output) streams are stored on separate
    drawers; one drawer per side is created on construction and more can be
    appended with "add_new_drawer".
    """

    def __init__(self, inputID, main_class):
        super().__init__(inputID, main_class)
        self.type = "evaporator"
        self.has_support_block = True
        # One fuel-side (input) drawer and one product-side (output) drawer.
        self.support_block.append(Drawer(main_class, self, is_input=True, allow_multiple_input=False))
        self.support_block.append(Drawer(main_class, self, is_input=False, allow_multiple_input=False))

    def add_new_drawer(self, is_input):
        # Append an additional drawer on the requested side of the block.
        self.support_block.append(Drawer(self.main_class, self, is_input=is_input, allow_multiple_input=False))

    def is_ready_for_calculation(self):
        """Return True when every support drawer is ready for calculation."""
        for supp_block in self.support_block:
            # NOTE(review): "is_ready_for_calculation" is referenced without
            # parentheses; if Drawer defines it as a method (not a property)
            # the bound method is always truthy and this check can never
            # fail — confirm against Drawer's definition.
            if not supp_block.is_ready_for_calculation:
                return False
        return True

    def initialize_connection_list(self, input_list):
        """Connect the block from a raw index list.

        The first two entries are the product inlet/outlet (drawer 1); the
        remaining signed entries are fuel streams (drawer 0), positive for
        inputs and negative for outputs.
        """
        new_conn_input_product = self.main_class.find_connection_by_index(abs(input_list[0]))
        new_conn_output_product = self.main_class.find_connection_by_index(abs(input_list[1]))
        self.add_connection(new_conn_input_product, is_input=True, append_to_support_block=1)
        self.add_connection(new_conn_output_product, is_input=False, append_to_support_block=1)
        for elem in input_list[2:]:
            new_conn = self.main_class.find_connection_by_index(abs(elem))
            if new_conn is not None:
                is_input = (elem > 0)
                self.add_connection(new_conn, is_input=is_input, append_to_support_block=0)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize the drawers' external connections to an XML element.

        Input drawers are exported under "FuelsConnections", output drawers
        under "ProductConnections", one "Block" element per drawer.
        """
        xml_connection_list = ETree.Element("Connections")
        fuels_connections = ETree.SubElement(xml_connection_list, "FuelsConnections")
        product_connections = ETree.SubElement(xml_connection_list, "ProductConnections")
        for support_block in self.support_block:
            if support_block.is_input:
                main_tree = ETree.SubElement(fuels_connections, "Block")
            else:
                main_tree = ETree.SubElement(product_connections, "Block")
            for input_connection in support_block.external_input_connections:
                input_xml = ETree.SubElement(main_tree, "input")
                input_xml.set("index", str(input_connection.index))
            for output_connection in support_block.external_output_connections:
                output_xml = ETree.SubElement(main_tree, "output")
                output_xml.set("index", str(output_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore the drawers and their connections from an XML element.

        Drawer order must match the export order: the i-th "Block" element on
        each side is loaded into the i-th drawer of that side.
        """
        fuels_connections = input_list.find("FuelsConnections")
        product_connections = input_list.find("ProductConnections")
        # Grow the drawer lists to one drawer per exported "Block" element.
        self.__add_support_blocks(len(fuels_connections.findall("Block")), True)
        self.__add_support_blocks(len(product_connections.findall("Block")), False)
        i = 0
        support_block_array = self.input_support_block
        for connection in fuels_connections.findall("Block"):
            self.__add_connection_by_index(connection, "input", append_to_support_block=support_block_array[i])
            self.__add_connection_by_index(connection, "output", append_to_support_block=support_block_array[i])
            i = i + 1
        i = 0
        support_block_array = self.output_support_block
        for connection in product_connections.findall("Block"):
            self.__add_connection_by_index(connection, "input", append_to_support_block=support_block_array[i])
            self.__add_connection_by_index(connection, "output", append_to_support_block=support_block_array[i])
            i = i + 1

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # "input" tags are inlet streams; any other tag name is treated as an outlet.
        if connection_name == "input":
            is_input = True
        else:
            is_input = False
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    def __add_support_blocks(self, n_support_blocks, is_input):
        # Starts from 1 because one drawer per side already exists from __init__.
        for i in range(1, n_support_blocks):
            self.add_new_drawer(is_input)

    @property
    def input_support_block(self) -> list:
        # Drawers collecting the fuel (input) streams, in insertion order.
        return_list = list()
        for support_block in self.support_block:
            if support_block.is_input:
                return_list.append(support_block)
        return return_list

    @property
    def output_support_block(self) -> list:
        # Drawers collecting the product (output) streams, in insertion order.
        return_list = list()
        for support_block in self.support_block:
            if not support_block.is_input:
                return_list.append(support_block)
        return return_list

    @classmethod
    def return_EES_needed_index(cls):
        """Map EES variable names to [index, repeatable] pairs."""
        return_dict = {"input_1": [1, False],
                       "output_1": [1, False],
                       "input_2": [1, False],
                       "output_2": [1, False]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        """Return mass- and pressure-continuity base equations for both sides."""
        return_element = dict()
        variables_list = [{"variable": "input_1", "type": costants.ZONE_TYPE_FLOW_RATE},
                          {"variable": "output_1", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return_element.update({"mass_continuity_1": {"variables": variables_list, "related_option": "none"}})
        variables_list = [{"variable": "input_2", "type": costants.ZONE_TYPE_FLOW_RATE},
                          {"variable": "output_2", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return_element.update({"mass_continuity_2": {"variables": variables_list, "related_option": "none"}})
        variables_list = [{"variable": "input_1", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "output_1", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity_1": {"variables": variables_list, "related_option": "none"}})
        variables_list = [{"variable": "input_2", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "output_2", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity_2": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the streams sharing the given zone type with "input_connection"."""
        # Locate the drawer (if any) that holds "input_connection".
        connected_drawer = None
        for drawer in self.support_block:
            if drawer.connection_is_in_connections_list(input_connection):
                connected_drawer = drawer
                break
        if zone_type == costants.ZONE_TYPE_FLOW_RATE:
            # In the evaporator flow rate is preserved for each drawer, hence the program identify the drawer to which
            # "input_connection" stream is connected and returns each fluid stream connected to that block
            if connected_drawer is not None:
                return connected_drawer.get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_FLUID:
            # In the evaporator fluid type is preserved for each drawer, hence the program identify the drawer to which
            # "input_connection" stream is connected and returns each fluid stream connected to that block
            if connected_drawer is not None:
                return connected_drawer.get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_PRESSURE:
            # In the evaporator pressure is preserved for each drawer, hence the program identify the drawer to which
            # "input_connection" stream is connected and returns each fluid stream connected to that block
            if connected_drawer is not None:
                return connected_drawer.get_fluid_stream_connections()
            else:
                return list()
        else:
            return list()
from EEETools.MainModules import Block
from EEETools.MainModules.support_blocks import Drawer
import xml.etree.ElementTree as ETree
from EEETools import costants
class HeatExchanger(Block):
    """Heat exchanger block, handled through a list of support "Drawer" blocks.

    Fuel (input) streams and product (output) streams are stored on separate
    drawers; one drawer per side is created on construction and more can be
    appended with "add_new_drawer".
    """

    def __init__(self, inputID, main_class):
        super().__init__(inputID, main_class)
        self.type = "heat exchanger"
        self.has_support_block = True
        # One fuel-side (input) drawer and one product-side (output) drawer.
        self.support_block.append(Drawer(main_class, self, is_input=True, allow_multiple_input=False))
        self.support_block.append(Drawer(main_class, self, is_input=False, allow_multiple_input=False))

    def add_new_drawer(self, is_input):
        # Append an additional drawer on the requested side of the block.
        # NOTE(review): unlike Evaporator.add_new_drawer, "allow_multiple_input"
        # is not forced to False here — confirm the Drawer default is intended.
        self.support_block.append(Drawer(self.main_class, self, is_input=is_input))

    def is_ready_for_calculation(self):
        """Return True when every support drawer is ready for calculation."""
        for supp_block in self.support_block:
            # NOTE(review): "is_ready_for_calculation" is referenced without
            # parentheses; if Drawer defines it as a method (not a property)
            # the bound method is always truthy and this check can never
            # fail — confirm against Drawer's definition.
            if not supp_block.is_ready_for_calculation:
                return False
        return True

    def initialize_connection_list(self, input_list):
        """Connect the block from a raw input list.

        The first element selects the input layout:
        - "Heat Exchanger"/"Scambiatore": [type, product in, product out, fuel in, fuel out];
        - the "Multi Fuel" variants: [type, product in, product out, signed fuel indices...];
        - anything else: [type, fuel in, fuel out, signed product indices...].
        Signed indices are positive for inputs and negative for outputs;
        drawer 0 is the fuel side, drawer 1 the product side.
        """
        if str(input_list[0]) in ["Heat Exchanger", "Scambiatore"]:
            new_conn_input_product = self.main_class.find_connection_by_index(abs(input_list[1]))
            new_conn_output_product = self.main_class.find_connection_by_index(abs(input_list[2]))
            new_conn_input_fuel = self.main_class.find_connection_by_index(abs(input_list[3]))
            new_conn_output_fuel = self.main_class.find_connection_by_index(abs(input_list[4]))
            self.add_connection(new_conn_input_product, is_input=True, append_to_support_block=1)
            self.add_connection(new_conn_output_product, is_input=False, append_to_support_block=1)
            self.add_connection(new_conn_input_fuel, is_input=True, append_to_support_block=0)
            self.add_connection(new_conn_output_fuel, is_input=False, append_to_support_block=0)
        elif str(input_list[0]) in ["Heat Exchanger - Multi Fuel", "Scambiatore - Multi Fuel"]:
            new_conn_input_product = self.main_class.find_connection_by_index(abs(input_list[1]))
            new_conn_output_product = self.main_class.find_connection_by_index(abs(input_list[2]))
            self.add_connection(new_conn_input_product, is_input=True, append_to_support_block=1)
            self.add_connection(new_conn_output_product, is_input=False, append_to_support_block=1)
            for elem in input_list[3:]:
                new_conn = self.main_class.find_connection_by_index(abs(elem))
                if not new_conn is None:
                    is_input = (elem > 0)
                    self.add_connection(new_conn, is_input=is_input, append_to_support_block=0)
        else:
            new_conn_input_fuel = self.main_class.find_connection_by_index(abs(input_list[1]))
            new_conn_output_fuel = self.main_class.find_connection_by_index(abs(input_list[2]))
            self.add_connection(new_conn_input_fuel, is_input=True, append_to_support_block=0)
            self.add_connection(new_conn_output_fuel, is_input=False, append_to_support_block=0)
            for elem in input_list[3:]:
                new_conn = self.main_class.find_connection_by_index(abs(elem))
                if not new_conn is None:
                    is_input = (elem > 0)
                    self.add_connection(new_conn, is_input=is_input, append_to_support_block=1)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize the drawers' external connections to an XML element.

        Input drawers are exported under "FuelsConnections", output drawers
        under "ProductConnections", one "Block" element per drawer.
        """
        xml_connection_list = ETree.Element("Connections")
        fuels_connections = ETree.SubElement(xml_connection_list, "FuelsConnections")
        product_connections = ETree.SubElement(xml_connection_list, "ProductConnections")
        for support_block in self.support_block:
            if support_block.is_input:
                main_tree = ETree.SubElement(fuels_connections, "Block")
            else:
                main_tree = ETree.SubElement(product_connections, "Block")
            for input_connection in support_block.external_input_connections:
                input_xml = ETree.SubElement(main_tree, "input")
                input_xml.set("index", str(input_connection.index))
            for output_connection in support_block.external_output_connections:
                output_xml = ETree.SubElement(main_tree, "output")
                output_xml.set("index", str(output_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore the drawers and their connections from an XML element.

        Drawer order must match the export order: the i-th "Block" element on
        each side is loaded into the i-th drawer of that side.
        """
        fuels_connections = input_list.find("FuelsConnections")
        product_connections = input_list.find("ProductConnections")
        # Grow the drawer lists to one drawer per exported "Block" element.
        self.__add_support_blocks(len(fuels_connections.findall("Block")), True)
        self.__add_support_blocks(len(product_connections.findall("Block")), False)
        i = 0
        support_block_array = self.input_support_block
        for connection in fuels_connections.findall("Block"):
            self.__add_connection_by_index(connection, "input", append_to_support_block=support_block_array[i])
            self.__add_connection_by_index(connection, "output", append_to_support_block=support_block_array[i])
            i = i + 1
        i = 0
        support_block_array = self.output_support_block
        for connection in product_connections.findall("Block"):
            self.__add_connection_by_index(connection, "input", append_to_support_block=support_block_array[i])
            self.__add_connection_by_index(connection, "output", append_to_support_block=support_block_array[i])
            i = i + 1

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # "input" tags are inlet streams; any other tag name is treated as an outlet.
        if connection_name == "input":
            is_input = True
        else:
            is_input = False
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    def __add_support_blocks(self, n_support_blocks, is_input):
        # Starts from 1 because one drawer per side already exists from __init__.
        for i in range(1, n_support_blocks):
            self.add_new_drawer(is_input)

    @property
    def input_support_block(self) -> list:
        # Drawers collecting the fuel (input) streams, in insertion order.
        return_list = list()
        for support_block in self.support_block:
            if support_block.is_input:
                return_list.append(support_block)
        return return_list

    @property
    def output_support_block(self) -> list:
        # Drawers collecting the product (output) streams, in insertion order.
        return_list = list()
        for support_block in self.support_block:
            if not support_block.is_input:
                return_list.append(support_block)
        return return_list

    @classmethod
    def return_EES_needed_index(cls):
        """Map EES variable names to [index, repeatable] pairs."""
        return_dict = {"input_1": [1, False],
                       "output_1": [1, False],
                       "input_2": [1, False],
                       "output_2": [1, False]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        """Return mass- and pressure-continuity base equations for both sides."""
        return_element = dict()
        variables_list = [{"variable": "input_1", "type": costants.ZONE_TYPE_FLOW_RATE},
                          {"variable": "output_1", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return_element.update({"mass_continuity_1": {"variables": variables_list, "related_option": "none"}})
        variables_list = [{"variable": "input_2", "type": costants.ZONE_TYPE_FLOW_RATE},
                          {"variable": "output_2", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return_element.update({"mass_continuity_2": {"variables": variables_list, "related_option": "none"}})
        variables_list = [{"variable": "input_1", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "output_1", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity_1": {"variables": variables_list, "related_option": "none"}})
        variables_list = [{"variable": "input_2", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "output_2", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity_2": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the streams sharing the given zone type with "input_connection"."""
        # Locate the drawer (if any) that holds "input_connection".
        connected_drawer = None
        for drawer in self.support_block:
            if drawer.connection_is_in_connections_list(input_connection):
                connected_drawer = drawer
                break
        if zone_type == costants.ZONE_TYPE_FLOW_RATE:
            # In an heat exchanger the flow rate is preserved for each drawer, hence the program identify the drawer
            # to which "input_connection" stream is connected and returns each fluid stream connected to that block
            if connected_drawer is not None:
                return connected_drawer.get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_FLUID:
            # In an heat exchanger fluid type is preserved for each drawer, hence the program identify the drawer to
            # which "input_connection" stream is connected and returns each fluid stream connected to that block
            if connected_drawer is not None:
                return connected_drawer.get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_PRESSURE:
            # In an heat exchanger pressure is preserved for each drawer, hence the program identify the drawer to which
            # "input_connection" stream is connected and returns each fluid stream connected to that block
            if connected_drawer is not None:
                return connected_drawer.get_fluid_stream_connections()
            else:
                return list()
        else:
            return list()
from EEETools.MainModules import Block
from EEETools.MainModules.support_blocks import Drawer
import xml.etree.ElementTree as ETree
from EEETools import costants
class Expander(Block):
    def __init__(self, inputID, main_class):
        """Build the expander; its fluid streams are handled by a support Drawer.

        NOTE(review): unlike Compressor/Pump, no explicit "is_input" is passed
        to the Drawer here — confirm the Drawer default matches the intent.
        """
        super().__init__(inputID, main_class)
        self.type = "expander"
        self.has_support_block = True
        self.support_block.append(Drawer(main_class, self, allow_multiple_input=False))
    def add_connection_to_support_block(self, new_connection, is_input):
        # Fluid streams live on the drawer, not on the block itself.
        self.support_block[0].add_connection(new_connection, is_input)
def is_ready_for_calculation(self):
return len(self.output_connections) >= 1 and len(self.support_block[0].output_connections) >= 1 and len(
self.support_block[0].input_connections) >= 1
def initialize_connection_list(self, input_list):
new_conn_power = self.main_class.find_connection_by_index(abs(input_list[0]))
new_conn_input_flow = self.main_class.find_connection_by_index(abs(input_list[1]))
new_conn_output_flow = self.main_class.find_connection_by_index(abs(input_list[2]))
new_conn_power.is_fluid_stream = False
self.add_connection(new_conn_power, is_input=False)
self.add_connection(new_conn_input_flow, is_input=True, append_to_support_block=0)
self.add_connection(new_conn_output_flow, is_input=False, append_to_support_block=0)
def export_xml_connection_list(self) -> ETree.Element:
xml_connection_list = ETree.Element("Connections")
fluid_connections = ETree.SubElement(xml_connection_list, "FluidConnections")
for input_connection in self.support_block[0].external_input_connections:
input_xml = ETree.SubElement(fluid_connections, "input")
input_xml.set("index", str(input_connection.index))
for output_connection in self.support_block[0].external_output_connections:
output_xml = ETree.SubElement(fluid_connections, "output")
output_xml.set("index", str(output_connection.index))
mechanical_connections = ETree.SubElement(xml_connection_list, "MechanicalConnections")
for output_connection in self.external_output_connections:
output_xml = ETree.SubElement(mechanical_connections, "output")
output_xml.set("index", str(output_connection.index))
return xml_connection_list
def append_xml_connection_list(self, input_list: ETree.Element):
fluid_connections = input_list.find("FluidConnections")
mechanical_connections = input_list.find("MechanicalConnections")
self.__add_connection_by_index(fluid_connections, "input", append_to_support_block=0)
self.__add_connection_by_index(fluid_connections, "output", append_to_support_block=0)
self.__add_connection_by_index(mechanical_connections, "output")
def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
if connection_name == "input":
is_input = True
else:
is_input = False
for connection in input_list.findall(connection_name):
new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
if new_conn is not None:
self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)
@classmethod
def return_EES_needed_index(cls):
return_dict = {"power input": [0, False],
"flow input": [1, False],
"flow output": [2, False]}
return return_dict
@classmethod
def return_EES_base_equations(cls):
return_element = dict()
variables_list = [{"variable": "flow input", "type": costants.ZONE_TYPE_FLOW_RATE},
{"variable": "flow output", "type": costants.ZONE_TYPE_FLOW_RATE}]
return_element.update({"mass_continuity": {"variables": variables_list, "related_option": "none"}})
return return_element
def return_other_zone_connections(self, zone_type, input_connection):
if zone_type == costants.ZONE_TYPE_FLOW_RATE:
# In the expander flow rate is preserved, hence if "input_connection" stream is connected to the support
# block (where the fluid streams are connected) the methods returns each fluid stream connected to the
# support block
if self.support_block[0].connection_is_in_connections_list(input_connection):
return self.support_block[0].get_fluid_stream_connections()
else:
return list()
elif zone_type == costants.ZONE_TYPE_FLUID:
# In the expander fluid type is preserved, hence if "input_connection" stream is connected to the support
# block (where the fluid streams are connected) the methods returns each fluid stream connected to the
# support block
if self.support_block[0].connection_is_in_connections_list(input_connection):
return self.support_block[0].get_fluid_stream_connections()
else:
return list()
elif zone_type == costants.ZONE_TYPE_PRESSURE:
# In the expander pressure is not preserved, hence an empty list is returned
return list()
else:
return list() | 3ETool | /3ETool-0.8.3.tar.gz/3ETool-0.8.3/EEETools/BlockSubClasses/expander.py | expander.py |
from EEETools.MainModules import Block
import xml.etree.ElementTree as ETree
from EEETools import costants
class Generic(Block):
    """Generic exergy block with an arbitrary number of fluid inputs/outputs.

    Unlike the specialized blocks it uses no support drawers: every
    connection is attached directly to the block itself.
    """

    def __init__(self, inputID, main_class):
        # NOTE(review): unlike sibling blocks, self.type is not set here —
        # presumably the base-class default applies; confirm before adding one.
        super().__init__(inputID, main_class)

    def is_ready_for_calculation(self):
        # Calculable once at least one input and one output are connected.
        return len(self.input_connections) >= 1 and len(self.output_connections) >= 1

    def initialize_connection_list(self, input_list):
        # Signed indices: positive values are inputs, negative values outputs.
        for elem in input_list:
            new_conn = self.main_class.find_connection_by_index(abs(elem))
            # Fixed idiom: "is not None" instead of "not ... is None".
            if new_conn is not None:
                is_input = (elem > 0)
                self.add_connection(new_conn, is_input)

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize the block's external connections to a <Connections>
        XML element holding a <FluidConnections> sub-element."""
        xml_connection_list = ETree.Element("Connections")
        fluid_connections = ETree.SubElement(xml_connection_list, "FluidConnections")
        for input_connection in self.external_input_connections:
            input_xml = ETree.SubElement(fluid_connections, "input")
            input_xml.set("index", str(input_connection.index))
        for output_connection in self.external_output_connections:
            output_xml = ETree.SubElement(fluid_connections, "output")
            output_xml.set("index", str(output_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore connections from a previously exported <Connections> element."""
        fluid_connections = input_list.find("FluidConnections")
        self.__add_connection_by_index(fluid_connections, "input")
        self.__add_connection_by_index(fluid_connections, "output")

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # connection_name is the XML tag ("input"/"output") and doubles as
        # the direction flag for the restored connections.
        is_input = connection_name == "input"
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    @classmethod
    def return_EES_needed_index(cls):
        # {label: [position in the EES input list, repeatable?]}
        return_dict = {"flow input": [1, True],
                       "flow output": [2, True]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        # Pressure continuity between the fluid inlet and outlet.
        return_element = dict()
        variables_list = [{"variable": "flow input", "type": costants.ZONE_TYPE_PRESSURE},
                          {"variable": "flow output", "type": costants.ZONE_TYPE_PRESSURE}]
        return_element.update({"pressure_continuity": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the fluid streams sharing the given zone property with
        input_connection, or an empty list if the property is not preserved
        by this block."""
        if zone_type == costants.ZONE_TYPE_FLOW_RATE:
            # In a generic block flow rate is not preserved, hence an empty list is returned
            return list()
        elif zone_type == costants.ZONE_TYPE_FLUID:
            # In a generic block fluid type is preserved, hence if "input_connection" stream is connected to the
            # block the methods returns each fluid stream connected to it
            if self.connection_is_in_connections_list(input_connection):
                return self.get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_PRESSURE:
            # In a generic block pressure is preserved, hence if "input_connection" stream is connected to the
            # block the methods returns each fluid stream connected to it
            if self.connection_is_in_connections_list(input_connection):
                return self.get_fluid_stream_connections()
            else:
                return list()
        else:
            return list()
from EEETools.MainModules.support_blocks import Drawer, get_drawer_sub_class
from EEETools.MainModules import Block
import xml.etree.ElementTree as ETree
from EEETools import costants
class EjectorSimplified(Block):
    """Simplified ejector block, modeled with three support drawers:

    - index 0 ("expander"): driving (motive) flow side
    - index 1 ("compressor"): driven (suction) flow side
    - index 2 ("mixer"): mixed outlet stream
    """

    def __init__(self, inputID, main_class):
        super().__init__(inputID, main_class)
        self.type = "Ejector Simplified"
        self.has_support_block = True
        self.mass_ratio = 0.
        SupportMixer = get_drawer_sub_class("mixer", main_class)
        self.support_block.append(Drawer(main_class, self))
        self.support_block.append(Drawer(main_class, self, is_input=False))
        self.support_block.append(SupportMixer(main_class, self))

    def add_connection_to_support_block(self, support_block, new_connection, is_input):
        # FIX: every branch previously routed to self.support_block[0]
        # (copy-paste error); connections now reach their own drawer,
        # matching the layout established in __init__ and
        # initialize_connection_list.
        if support_block == "expander":
            self.support_block[0].add_connection(new_connection, is_input)
        elif support_block == "compressor":
            self.support_block[1].add_connection(new_connection, is_input)
        elif support_block == "mixer":
            self.support_block[2].add_connection(new_connection, is_input)

    def is_ready_for_calculation(self):
        # NOTE(review): only drawer 0 is checked here — presumably drawers 1
        # and 2 should be validated as well; behavior kept pending confirmation.
        return len(self.output_connections) >= 1 and len(self.support_block[0].output_connections) >= 1 and len(
            self.support_block[0].input_connections) >= 1

    def initialize_connection_list(self, input_list):
        # input_list layout: [mass ratio, mixed outlet, driving inlet, driven inlet]
        self.mass_ratio = float(input_list[0])
        conn_output = self.main_class.find_connection_by_index(abs(input_list[1]))
        conn_input_driving = self.main_class.find_connection_by_index(abs(input_list[2]))
        conn_input_driven = self.main_class.find_connection_by_index(abs(input_list[3]))
        self.add_connection(conn_output, is_input=False, append_to_support_block=2)
        self.support_block[0].add_connection(conn_input_driving, is_input=True, append_to_support_block=0)
        self.support_block[1].add_connection(conn_input_driven, is_input=False, append_to_support_block=0)
        # NOTE(review): this connection is created but never configured or
        # attached; it looks like a leftover. Kept for behavioral
        # compatibility — TODO confirm it can be removed.
        new_conn = self.main_class.append_connection(from_block=self)

    def prepare_for_calculation(self):
        # NOTE(review): the generated connection is labelled "Electrical
        # Power Output", which looks copy-pasted from the expander block;
        # behavior preserved pending confirmation of the intended label.
        self.support_block[0].prepare_for_calculation()
        new_conn = self.main_class.append_connection(from_block=self)
        new_conn.name = "Electrical Power Output"
        new_conn.is_useful_effect = True
        new_conn.automatically_generated_connection = True
        new_conn.exergy_value = self.exergy_balance

    def export_xml_connection_list(self) -> ETree.Element:
        """Serialize external connections to a <Connections> XML element."""
        xml_connection_list = ETree.Element("Connections")
        fluid_connections = ETree.SubElement(xml_connection_list, "FluidConnections")
        for input_connection in self.support_block[0].external_input_connections:
            input_xml = ETree.SubElement(fluid_connections, "input")
            input_xml.set("index", str(input_connection.index))
        for output_connection in self.support_block[0].external_output_connections:
            output_xml = ETree.SubElement(fluid_connections, "output")
            output_xml.set("index", str(output_connection.index))
        mechanical_connections = ETree.SubElement(xml_connection_list, "MechanicalConnections")
        for output_connection in self.external_output_connections:
            output_xml = ETree.SubElement(mechanical_connections, "output")
            output_xml.set("index", str(output_connection.index))
        return xml_connection_list

    def append_xml_connection_list(self, input_list: ETree.Element):
        """Restore connections from a previously exported <Connections> element."""
        fluid_connections = input_list.find("FluidConnections")
        mechanical_connections = input_list.find("MechanicalConnections")
        self.__add_connection_by_index(fluid_connections, "input", append_to_support_block=0)
        self.__add_connection_by_index(fluid_connections, "output", append_to_support_block=0)
        self.__add_connection_by_index(mechanical_connections, "output")

    def __add_connection_by_index(self, input_list: ETree.Element, connection_name, append_to_support_block=None):
        # connection_name is the XML tag ("input"/"output") and doubles as
        # the direction flag for the restored connections.
        if connection_name == "input":
            is_input = True
        else:
            is_input = False
        for connection in input_list.findall(connection_name):
            new_conn = self.main_class.find_connection_by_index(float(connection.get("index")))
            if new_conn is not None:
                self.add_connection(new_conn, is_input, append_to_support_block=append_to_support_block)

    @classmethod
    def return_EES_needed_index(cls):
        # NOTE(review): labels look copy-pasted from the expander block —
        # confirm against the EES export format before renaming.
        return_dict = {"power input": [0, False],
                       "flow input": [1, False],
                       "flow output": [2, False]}
        return return_dict

    @classmethod
    def return_EES_base_equations(cls):
        # Mass continuity between the fluid inlet and outlet.
        return_element = dict()
        variables_list = [{"variable": "flow input", "type": costants.ZONE_TYPE_FLOW_RATE},
                          {"variable": "flow output", "type": costants.ZONE_TYPE_FLOW_RATE}]
        return_element.update({"mass_continuity": {"variables": variables_list, "related_option": "none"}})
        return return_element

    def return_other_zone_connections(self, zone_type, input_connection):
        """Return the fluid streams sharing the given zone property with
        input_connection, or an empty list if the property is not preserved."""
        if zone_type == costants.ZONE_TYPE_FLOW_RATE:
            # Flow rate is preserved within a drawer, hence if "input_connection" is connected to the support
            # block (where the fluid streams are connected) the method returns each fluid stream connected to it
            if self.support_block[0].connection_is_in_connections_list(input_connection):
                return self.support_block[0].get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_FLUID:
            # Fluid type is preserved within a drawer, hence if "input_connection" is connected to the support
            # block (where the fluid streams are connected) the method returns each fluid stream connected to it
            if self.support_block[0].connection_is_in_connections_list(input_connection):
                return self.support_block[0].get_fluid_stream_connections()
            else:
                return list()
        elif zone_type == costants.ZONE_TYPE_PRESSURE:
            # Pressure is not preserved across the ejector, hence an empty list is returned
            return list()
        else:
            return list()
# Data Scientist Nanodegree (Term 2)
Content for Udacity's Data Science Nanodegree curriculum, which includes project and lesson content.
<a rel="license" href="http://creativecommons.org/licenses/by-nc-nd/4.0/"><img alt="Creative Commons License" style="border-width:0" src="https://i.creativecommons.org/l/by-nc-nd/4.0/88x31.png" /></a><br />This work is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-nc-nd/4.0/">Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License</a>. Please refer to [Udacity Terms of Service](https://www.udacity.com/legal) for further information.
| 3a-python-package-ligia | /3a_python_package_ligia-0.1.tar.gz/3a_python_package_ligia-0.1/README.md | README.md |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """

    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        self.mean = 1.0 * sum(self.data) / len(self.data)
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction (n - 1) when the data is a sample.
        n = len(self.data) - 1 if sample else len(self.data)
        mean = self.calculate_mean()
        self.stdev = math.sqrt(sum((d - mean) ** 2 for d in self.data) / n)
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # BUG FIX: the second ylabel previously targeted axes[0] again,
        # leaving the pdf subplot unlabeled.
        axes[1].set_ylabel('Density')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances (not stdevs) add for independent Gaussians.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
# Best Buy Bullet Bot (3B Bot)
Best Buy Bullet Bot, abbreviated to 3B Bot, is a stock checking bot with auto-checkout created to instantly purchase out-of-stock items on Best Buy once restocked. It was designed for speed with ultra-fast auto-checkout, as well as the ability to utilize all cores of your CPU with multiprocessing for optimal performance.
* Headless item stock tracking
* Multiprocessing and multithreading for best possible performance
* One-time login on startup
* Ultra-fast auto-checkout
* Encrypted local credentials storage
* Super easy setup and usage
Bear in mind that 3B Bot is currently not equipped to handle a queue and/or email verification during the checkout process. If either of these is present, the bot will wait for you to take over and will take control again once you are back on the traditional checkout track.
![3B Bot](https://raw.githubusercontent.com/LeonShams/BestBuyBulletBot/main/docs/source/assets/demo.svg)
<br>
## Prerequisites
1. **A Best Buy account with your location and payment information already set in advance.**
The only information the bot will fill out during checkout is your login credentials (email and password) and the CVV of the card used when setting up your payment information on Best Buy (PayPal is currently not supported). All other information that may be required during checkout must be filled out beforehand.
2. **Python 3.6 or newer**
3B Bot is written in Python so if it is not already installed on your computer please install it from <https://www.python.org/downloads/>.
**On Windows make sure to tick the “Add Python to PATH” checkbox during the installation process.** On MacOS this is done automatically.
Once installed, checking your Python version can be done with the following.
For MacOS:
```bash
python3 --version
```
For Windows:
```bash
python --version
```
If your version is less than 3.6 or you get the message `python is not recognized as an internal or external command` then install python from the link above.
3. **A supported browser**
    3B Bot currently only supports [Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/). We recommend using the Firefox browser for its superior performance during tracking.
## Installation
Installing 3B Bot is as simple as running the following in your shell (Command Prompt for Windows and Terminal for MacOS)
For MacOS:
```bash
python3 -m pip install --upgrade 3b-bot
```
For Windows:
```bash
pip install --upgrade 3b-bot
```
## Usage
To start the bot just enter the following in your shell
```bash
3b-bot
```
**For more usage information check out our [documentation](https://bestbuybulletbot.readthedocs.io/en/latest/).**
## How does it work?
This is what 3B Bot does step by step at a high level
1. Get currently set URLs to track or prompt if none are set.
2. Using the requests library validate all URLs and get item names.
3. Open up a Google Chrome browser with selenium and perform the following.
a. Navigate to the login page.
b. If we have logged in previously we can use the saved cookies from the previous session to skip the log-in process. If not automatically fill out the username and password fields to log in.
c. Make a get request to the Best Buy API to confirm that there are no items in the cart.
d. If this is the first time using the bot check that a mailing address and payment information has been set.
e. Go to each URL and collect the page cookies. This is done so that during checkout we can just apply the cookies for that URL instead of going through the entire login process.
4. Assign each URL to a core on the CPU.
5. Each core will start a specified number of threads.
6. Each thread will repeatedly check whether the "add to cart button" is available for its item.
7. When a thread notices that an item has come back in stock it will unlock its parent core and lock all other threads on every core to conserve CPU resources and WIFI.
8. The unlocked parent will print to the terminal that the item has come back in stock, play a sound, and attempt to automatically checkout the item with the following steps.
a. With the driver that was used to track the item, click the add-to-cart button.
b. Open up another browser window (this one is visible) and navigate to the item URL to set some cookies to login.
c. Redirect to the checkout page.
d. Enter the CVV for the card.
e. Click "place order".
9. Once finished the parent will update its funds, the item quantity, and unlock all threads to resume stock tracking.
10. Sound will stop playing when the item is no longer in stock.
## Performance tips
The following are tips to achieve the best possible performance with 3B Bot.
* Use the same amount of URLs as cores on your CPU. You can create a URL group with the same URL repeated multiple times to increase the number of URLs you have and `3b-bot count-cores` can be used to see how many cores your CPU has.
* Use ethernet as opposed to WIFI for a stronger more stable connection.
* Adequately cool your computer to prevent thermal throttling.
* Tweak the number of threads per URL. This can be changed with the `3b-bot set-threads` command.
* If you plan to complete the checkout process yourself, disable auto-checkout in the settings for a significant performance improvement.
Overall, item stock tracking is a CPU and internet bound task, so at the end of the day the better your CPU and the stronger your internet the faster your tracking.
| 3b-bot | /3b-bot-1.1.0.tar.gz/3b-bot-1.1.0/README.md | README.md |
import argparse
import warnings
from best_buy_bullet_bot.utils import count_cores
from best_buy_bullet_bot.version import __version__
class NoAction(argparse.Action):
    """An argparse action that does nothing when triggered.

    Lets an entry appear in the help menu while staying effectively
    uncallable: the default is suppressed and invoking the option leaves
    the parsed namespace untouched.
    """

    def __init__(self, **kwargs):
        kwargs.setdefault("default", argparse.SUPPRESS)
        kwargs.setdefault("nargs", 0)
        super().__init__(**kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        pass
class FuncKwargs(dict):
    """Keyword-argument dict that only collects flags for one command.

    A flag is copied from the parsed arguments only when it is truthy and
    the user-selected command matches ``cmd_name``.
    """

    def __init__(self, args):
        super().__init__()
        self.args = args

    def add_flag(self, flag_name, cmd_name):
        value = getattr(self.args, flag_name)
        if value and self.args.cmd == cmd_name:
            self[flag_name] = value
class ImportWrapper:
    """Lazily imports a module and forwards attribute access to it.

    Only the function the user actually selects triggers the import, which
    avoids unnecessary imports (and their import-time warnings).
    """

    def __init__(self, file):
        # Dotted module path, imported on demand.
        self.file = file

    def __getattr__(self, name):
        # FIX: previously implemented with __getattribute__, which
        # intercepted *every* lookup (including dunders such as __class__
        # and __repr__) and returned a bogus callable. __getattr__ is only
        # invoked for names not found normally, so "file" and dunders
        # resolve as usual.
        def call_func(*args, **kwargs):
            module = __import__(self.file, fromlist=[""])
            return getattr(module, name)(*args, **kwargs)

        return call_func
# This is done to prevent unnecessary imports and more importantly
# prevent a bunch of warnings from setting_utils when imported
tracker = ImportWrapper("best_buy_bullet_bot.tracker")
setting_utils = ImportWrapper("best_buy_bullet_bot.data.setting_utils")
url_utils = ImportWrapper("best_buy_bullet_bot.data.url_utils")
user_data = ImportWrapper("best_buy_bullet_bot.data.user_data")
browser_login = ImportWrapper("best_buy_bullet_bot.data.browser_login")
OPS = {
"start": [tracker.start, "Start tracking the currently set URLs."],
"view-urls": [url_utils.view_urls, "View list of tracked URLs."],
"add-url": [url_utils.add_url, "Add URL to tracking list."],
"add-url-group": [
url_utils.add_url_group,
"Add multiple URLs and set a quantity for all of them as a whole instead of individually.",
],
"remove-url": [
url_utils.remove_url,
"Remove a URL from the list of tracked URLs.",
],
"test-urls": [
url_utils.test_urls,
"Tests to make sure all URLs can be tracked. This is also run on startup.",
],
"clear-urls": [url_utils.clear_urls, "Remove all tracked URLs."],
"view-settings": [setting_utils.view_settings, "View current settings."],
"set-funds": [
setting_utils.set_funds,
"Set how much money the bot is allowed to spend.",
],
"set-tax": [setting_utils.set_tax, "Set the sales tax rate for your state."],
"toggle-auto-checkout": [
setting_utils.toggle_auto_checkout,
"Enable/disable auto checkout.",
],
"change-browser": [
setting_utils.change_browser,
"Pick the browser to be used during tracking and auto-checkout (only applies if auto-checkout is enabled). \
Firefox is the default and recommended browser.",
],
"test-sound": [setting_utils.test_sound, "Play sound sample."],
"set-sound-mode": [
setting_utils.set_sound_mode,
"Choose whether you want sound to be completely disabled, play once on item restock, or play repeatedly on item restock.",
],
"set-threads": [
setting_utils.set_threads,
"Select the number of threads to allocate to tracking each URL.",
],
"count-cores": [
count_cores,
"Print how many CPU cores you have and how many threads each core has.",
],
"reset-settings": [
setting_utils.reset_settings,
"Reset setting to the defaults.",
],
"view-creds": [
user_data.print_creds,
"View your Best Buy login credentials (email, password, cvv).",
],
"set-creds": [
user_data.set_creds,
"Set your Best Buy login credentials (email, password, cvv).",
],
"clear-creds": [
user_data.clear_creds,
"Reset your Best Buy login credentials (email, password, cvv). Also offers the option to reset your access password.",
],
}
def run_command():
    """Parse the command line and dispatch to the selected 3B Bot command."""
    parser = argparse.ArgumentParser(
        prog="3b-bot",
        description="Setup and control your Best Buy bot.",
        epilog="Good luck :)",
    )
    parser.add_argument(
        "-v",
        "--version",
        action="version",
        version="%(prog)s " + __version__,
        help="show 3B Bot version number",
    )
    # Single positional "command"; defaults to "start" when omitted.
    parser.add_argument(
        "cmd",
        default="start",
        const="start",
        nargs="?",
        choices=OPS.keys(),
        help="Performs a specified operation.",
        metavar="command",
        type=str.lower,
    )

    # Pseudo-arguments (NoAction) so every command appears in its own
    # "Available commands" help section without being parseable flags.
    group = parser.add_argument_group(title="Available commands")
    for name, [func, help_msg] in OPS.items():
        group.add_argument(name, help=help_msg, action=NoAction)

    parser.add_argument(
        "-w", "--suppress-warnings", action="store_true", help="suppress warnings"
    )

    """
    EASTER EGG: Thank you for reading the source code!

    To run the bot with a higher priority level and achieve better performance complete the following.

    If using Firefox, complete the following before moving on to the next step:
        WINDOWS: Open a Command Prompt window with "Run as administrator" https://www.educative.io/edpresso/how-to-run-cmd-as-an-administrator
        MAC: Enter the command `su` in your terminal to gain root privileges. Beware your settings may be different in the root session, but you can always return to a normal session with the `exit` command.

    Then regardless of your browser:
        Run `3b-bot --fast` in your shell.
    """
    parser.add_argument("--fast", action="store_true", help=argparse.SUPPRESS)

    parser.add_argument(
        "--headless", action="store_true", help="hide the browser during auto checkout"
    )
    parser.add_argument(
        "--verify-account",
        action="store_true",
        help="confirm that the account is setup properly (automatically performed on first run)",
    )
    parser.add_argument(
        "--skip-verification",
        action="store_true",
        help="skip checks on first run that make sure account is setup properly.",
    )
    parser.add_argument(
        "--force-login",
        action="store_true",
        help="force browser to go through traditional login process as opposed to using cookies to skip steps",
    )
    args = parser.parse_args()

    # Only the "start" command accepts these flags; FuncKwargs filters them.
    func_kwargs = FuncKwargs(args)
    func_kwargs.add_flag("fast", "start")
    func_kwargs.add_flag("headless", "start")
    func_kwargs.add_flag("verify_account", "start")
    func_kwargs.add_flag("skip_verification", "start")

    if args.suppress_warnings:
        warnings.filterwarnings("ignore")
    else:
        # Just ignore the depreciation warnings
        warnings.filterwarnings("ignore", category=DeprecationWarning)

    if args.force_login:
        # Dropping saved cookies forces a full credential login next run.
        browser_login.delete_cookies()

    # Run command
    OPS[args.cmd][0](**func_kwargs)
import logging
import sys
import clipboard
import requests
from selenium.common.exceptions import NoSuchWindowException, TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from best_buy_bullet_bot.data.browser_login import (
cookies_available,
load_cookies,
save_cookies,
)
from best_buy_bullet_bot.data.setting_utils import (
DRIVER_NAMES,
change_browser,
get_settings,
is_installed,
update_setting,
)
from best_buy_bullet_bot.utils import Colors, loading
# Settings are read once at import time and cached in module constants.
SETTINGS = get_settings()
TAX = SETTINGS["tax"]
BROWSER_NAME = SETTINGS["browser"]
DRIVER_WRAPPER = DRIVER_NAMES[BROWSER_NAME]
MAC = sys.platform == "darwin"
USER_TAKEOVER = 20 * 60  # 20 min for user to takeover if the bot gets stuck

# Silence webdriver/urllib3 log noise during tracking.
logging.disable(logging.WARNING)

try:
    # Download (or reuse) the webdriver binary for the configured browser.
    DRIVER_PATH = DRIVER_WRAPPER.manager().install()
except ValueError:
    # Manager failed - most likely because the browser itself is missing;
    # prompt the user to pick an installed browser instead.
    if not is_installed(SETTINGS["browser"]):
        Colors.print(
            f"{SETTINGS['browser'].title()} is not installed on your computer.",
            properties=["fail"],
        )
        change_browser()
def _get_options(headless):
    """Build the browser options object used for tracking sessions."""
    opts = DRIVER_WRAPPER.options()
    # Skip waiting for full page loads; we poll elements ourselves.
    opts.page_load_strategy = "none"
    opts.add_argument("--proxy-server='direct://'")
    opts.add_argument("--proxy-bypass-list=*")

    if headless:
        opts.add_argument("--headless")

    if BROWSER_NAME == "chrome":
        # Suppress "DevTools listening on ws:..." message
        opts.add_experimental_option("excludeSwitches", ["enable-logging"])

    return opts
# Pre-built options indexed by a headless boolean: [headful, headless].
PREBUILT_OPTIONS = [_get_options(False), _get_options(True)]
def get_user_agent():
    """Return the browser's user-agent string via a throwaway headless driver."""
    browser = DRIVER_WRAPPER.driver(
        executable_path=DRIVER_PATH, options=PREBUILT_OPTIONS[True]
    )
    agent = browser.execute_script("return navigator.userAgent")
    browser.quit()
    return agent
def money2float(money):
    """Convert a currency string such as "$1,234.56" into a float.

    Drops the leading currency symbol and strips thousands separators
    before conversion.
    """
    amount = money[1:]
    return float(amount.replace(",", ""))
def fast_text(text):
    """Copy ``text`` to the clipboard and return the platform paste keystroke.

    Sending a paste shortcut to an input field is much faster than typing
    the text character by character.
    """
    clipboard.copy(text)
    modifier = Keys.COMMAND if MAC else Keys.CONTROL
    return modifier + "v"
# Frequently used Best Buy account URLs.
account_page_url = "https://www.bestbuy.com/site/customer/myaccount"
billing_url = "https://www.bestbuy.com/profile/c/billinginfo/cc"
def terminate(driver):
    """Close the browser and exit the process with a failure status."""
    driver.quit()
    sys.exit(1)
def _login(driver, wait, headless, email, password, cookies_set):
    """Log into Best Buy with the given driver, saving session cookies on success.

    If saved cookies already authenticate the session, the login form is
    skipped entirely. In headless mode a failed login terminates the bot;
    in headful mode the user is asked to take over manually.
    """
    # Wait for either the login form ("Keep me signed in" checkbox) or the
    # account page search bar (meaning we are already authenticated).
    branch = wait.until(
        EC.presence_of_element_located(
            (By.CSS_SELECTOR, "#ca-remember-me, .shop-search-bar")
        )
    )
    if branch.get_attribute("class") == "shop-search-bar":
        # We are already logged in
        return

    # Click "Keep me signed in" button
    branch.click()

    if not cookies_set:
        # Fill in email box
        driver.find_element_by_id("fld-e").send_keys(fast_text(email))

    # Fill in password box
    driver.find_element_by_id("fld-p1").send_keys(fast_text(password))

    # Click the submit button
    driver.find_element_by_css_selector(
        ".btn.btn-secondary.btn-lg.btn-block.c-button-icon.c-button-icon-leading.cia-form__controls__submit"
    ).click()

    # Check for error or redirect
    branch = wait.until(
        EC.presence_of_element_located(
            (
                By.CSS_SELECTOR,
                ".shop-search-bar, "  # We got redirected to the account page
                ".cia-cancel, "  # Skippable verification page
                ".c-alert.c-alert-level-error, "  # Error popup message
                "#fld-e-text, "  # Invalid email address
                "#fld-p1-text",  # Invalid password
            )
        )
    )

    # If we hit an error
    if branch.get_attribute(
        "class"
    ) == "c-alert c-alert-level-error" or branch.get_attribute("id") in [
        "fld-e-text",
        "fld-p1-text",
    ]:
        if headless:
            # If headless raise error
            Colors.print(
                "Incorrect login info. Please correct the username or password.",
                properties=["fail", "bold"],
            )
            terminate(driver)
        else:
            # If headful ask the user to take over
            Colors.print(
                "Unable to login automatically. Please correct your credentials or enter the information manually.",
                properties=["fail"],
            )
            branch = wait.until(
                EC.presence_of_element_located(
                    (By.CSS_SELECTOR, ".shop-search-bar, .cia-cancel")
                )
            )

    # If we hit a skippable verification page
    if "cia-cancel" in branch.get_attribute("class"):
        # Redirect to the "my account" page
        driver.get(account_page_url)

    # Persist session cookies so future runs can skip the login form.
    wait.until(EC.presence_of_element_located((By.CLASS_NAME, "shop-search-bar")))
    save_cookies(driver)
def check_cart(driver):
    """Confirm the signed-in user's Best Buy cart is empty; exit otherwise."""
    with loading("Confirming cart is empty"):
        # Reuse the browser's identity so the API call looks like the session
        user_agent = driver.execute_script("return navigator.userAgent")
        session_cookies = driver.execute_script("return document.cookie")
        request_headers = {
            "Host": "www.bestbuy.com",
            "User-Agent": user_agent,
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": "https://www.bestbuy.com/site/customer/myaccount",
            "X-CLIENT-ID": "browse",
            "X-REQUEST-ID": "global-header-cart-count",
            "Connection": "keep-alive",
            "Cookie": session_cookies,
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "Cache-Control": "max-age=0",
        }
        # Ask the Best Buy API how many items are currently in the cart
        response = requests.get(
            "https://www.bestbuy.com/basket/v1/basketCount", headers=request_headers
        )
        cart_count = response.json()["count"]
    if cart_count != 0:
        Colors.print(
            "Too many items in the cart. Please empty your cart before starting the bot.",
            properties=["fail", "bold"],
        )
        terminate(driver)
def perform_account_verification(driver, wait):
    """Check that a shipping address and a default payment method exist.

    Terminates the program when either check fails.
    """
    with loading("Verifying account setup"):
        # Check that a shipping address has been set
        shipping_address = wait.until(
            EC.presence_of_element_located(
                (
                    By.CSS_SELECTOR,
                    "div.account-setting-block-container:nth-child(1) > div:nth-child(2) > a:last-child",
                )
            )
        )
        # An unstyled link here is treated as "no address set" — assumption
        # based on Best Buy's current markup; confirm if the page changes
        if shipping_address.get_attribute("class") == "":
            Colors.print(
                "Shipping address has not been set. You can add a shipping address to \
            your account at https://www.bestbuy.com/profile/c/address/shipping/add.",
                properties=["fail", "bold"],
            )
            terminate(driver)
        # Confirm that a default payment method has been created
        driver.get(billing_url)
        payment_method_list = wait.until(
            EC.presence_of_element_located(
                (
                    By.CSS_SELECTOR,
                    ".pf-credit-card-list__content-spacer > ul.pf-credit-card-list__credit-card-list",
                )
            )
        )
        # A zero-height card list is taken to mean no saved cards
        if payment_method_list.size["height"] == 0:
            Colors.print(
                f"A default payment method has not been created. Please create one at {billing_url}.",
                properties=["fail", "bold"],
            )
            terminate(driver)
    Colors.print("Account has passed all checks!", properties=["success"])
def collect_item_cookies(driver, wait, urls):
    """Visit every tracked URL, recording login cookies and a tax-adjusted price.

    Returns:
        (cookie_sets, price_estimates) — parallel lists, one entry per URL.
    """
    cookie_sets = []
    price_estimates = []
    previous_price_element = None
    price_selector = (
        ".pricing-price > div > div > div > .priceView-hero-price.priceView-customer-price, "
        ".pricing-price > div > div > div > div > section > div > div > .priceView-hero-price.priceView-customer-price"
    )
    with loading("Collecting cookies for each URL"):
        for url in urls:
            driver.get(url)
            # Wait for the previous page's price element to unload first
            if previous_price_element is not None:
                wait.until(EC.staleness_of(previous_price_element))
            previous_price_element = wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, price_selector))
            )
            listed_price = previous_price_element.text.split("\n")[0]
            price_estimates.append(money2float(listed_price) * (1 + TAX))
            cookie_sets.append(driver.get_cookies())
    return cookie_sets, price_estimates
def _browser_startup(
    driver, headless, email, password, urls, verify_account, skip_verification
):
    """Log in, validate the cart/account, and gather per-URL cookies and prices.

    Returns:
        The (login_cookies_list, predicted_prices) tuple from
        `collect_item_cookies`. Quits the driver before returning.
    """
    wait = WebDriverWait(driver, USER_TAKEOVER)
    with loading("Logging in"):
        driver.get(account_page_url)
        # We will then get redirected to the sign in page
        # If we have logged in previously we can use the cookies from that session
        # to skip steps in the login process and prevent the system from detecting
        # a bunch of logins from the same account
        cookies_exist = cookies_available()
        if cookies_exist:
            if load_cookies(driver):
                driver.refresh()
            else:
                # An error occurred while adding the login cookies
                cookies_exist = False
        _login(driver, wait, headless, email, password, cookies_exist)
    check_cart(driver)
    if not skip_verification:
        # Verify once by default, or every run when explicitly requested
        if SETTINGS["account verification"] or verify_account:
            perform_account_verification(driver, wait)
            if not verify_account:
                print("This was a one time test and will not be performed again.\n")
                update_setting("account verification", False)
    item_cookies = collect_item_cookies(driver, wait, urls)
    driver.quit()
    return item_cookies
def browser_startup(headless, *args, **kwargs):
    """Create a browser and run the startup flow, exiting the program on failure.

    Returns:
        Whatever `_browser_startup` returns: (login_cookies_list,
        predicted_prices). Terminates on timeout or a user-closed window.
    """
    driver = DRIVER_WRAPPER.driver(
        executable_path=DRIVER_PATH, options=PREBUILT_OPTIONS[headless]
    )
    try:
        return _browser_startup(driver, headless, *args, **kwargs)
    # Timed out while trying to locate element
    except TimeoutException:
        Colors.print(
            "Browser window has timed out. Closing bot.", properties=["fail", "bold"]
        )
        terminate(driver)
    # User has closed the browser window
    except NoSuchWindowException:
        terminate(driver)
def _purchase(
    driver,
    title,
    password,
    cvv,
    money_manager,
):
    """Drive the fast-track checkout flow and attempt to place the order.

    Handles cart redirects and password re-confirmation, fills in the CVV,
    checks the page's grand total against available funds, and clicks the
    place-order button.

    Args:
        driver: Logged-in selenium driver.
        title: Product title (used in status messages).
        password: Account password (re-entered if Best Buy asks).
        cvv: Card security code typed into the checkout form.
        money_manager: Budget tracker used to verify and deduct funds.

    Returns:
        True when the order was placed, False when funds were insufficient.
    """
    # Go to the checkout page
    driver.get("https://www.bestbuy.com/checkout/r/fast-track")
    wait = WebDriverWait(driver, USER_TAKEOVER)
    # Get to the CVV page
    while True:
        branch = wait.until(
            EC.element_to_be_clickable(
                (
                    By.CSS_SELECTOR,
                    "#credit-card-cvv, "  # Place order page
                    ".button--continue > button.btn.btn-lg.btn-block.btn-secondary, "  # Continue to payment info page
                    ".checkout-buttons__checkout > .btn.btn-lg.btn-block.btn-primary",  # We got redirected to the cart
                )
            )
        )
        # If we got redirected to the cart
        if branch.get_attribute("class") == "btn btn-lg btn-block btn-primary":
            # Click "proceed to checkout" button
            branch.click()
            branch = wait.until(
                EC.element_to_be_clickable(
                    (
                        By.CSS_SELECTOR,
                        "#credit-card-cvv, "  # Place order page
                        "#cvv, "  # Review and place order page
                        ".button--continue > button.btn.btn-lg.btn-block.btn-secondary, "  # Continue to place order page (page before place order page)
                        "#fld-p1",  # Sign in (only requires password)
                    )
                )
            )
            # If it wants to confirm our password
            if branch.get_attribute("class").strip() == "tb-input":
                branch.send_keys(fast_text(password))
                driver.find_element_by_css_selector(
                    ".btn.btn-secondary.btn-lg.btn-block.c-button-icon.c-button-icon-leading.cia-form__controls__submit"
                ).click()  # Click sign in button
                # We will loop back around and handle what comes next
            else:
                break
        else:
            break
    # Select the CVV text box
    if branch.get_attribute("class") == "btn btn-lg btn-block btn-secondary":
        branch.click()
        cvv_box = wait.until(
            EC.element_to_be_clickable(
                (
                    By.CSS_SELECTOR,
                    "#credit-card-cvv, #cvv",
                )
            )
        )
    else:
        cvv_box = branch
    cvv_box.send_keys(fast_text(cvv))
    # Locate and parse the grand total text
    grand_total = money2float(
        driver.find_element_by_css_selector(
            ".order-summary__total > .order-summary__price > .cash-money"
        ).text
    )
    # Make sure we have sufficient funds for the purchase
    if money_manager.check_funds(grand_total):
        # Click place order button
        driver.find_element_by_css_selector(
            ".btn.btn-lg.btn-block.btn-primary, .btn.btn-lg.btn-block.btn-primary.button__fast-track"
        ).click()
        # Deduct grand total from available funds
        money_manager.make_purchase(grand_total)
        Colors.print(
            f"Successfully purchased {title}. The item was a grand total of ${grand_total:,.2f} leaving you with ${money_manager.get_funds():,.2f} of available funds.",
            properties=["success", "bold"],
        )
        return True
    else:
        # Fixed misspelling "Insuffient" in the user-facing message
        Colors.print(
            f"Insufficient funds to purchase {title} which costs a grand total of ${grand_total:,.2f} while you only have ${money_manager.get_funds():,.2f} of available funds.",
            properties=["fail"],
        )
        return False
def purchase(
    url, login_cookies, headless, headless_driver, headless_wait, *args, **kwargs
):
    """Open (or reuse) a browser on the product page and run the checkout.

    Returns:
        True on a completed purchase, False when the window was closed or
        funds were insufficient, and None (falsy) after a checkout timeout.
    """
    if not headless:
        # Create a new visible driver for the checkout process
        driver = DRIVER_WRAPPER.driver(
            executable_path=DRIVER_PATH, options=PREBUILT_OPTIONS[False]
        )
        driver.get(url)
        for cookie in login_cookies:
            driver.add_cookie(cookie)
    else:
        # Use the old headless driver so we don't have to create a new one
        driver = headless_driver
        # Have the existing headless tracker driver click the add-to-cart button
        headless_wait.until(
            EC.element_to_be_clickable(
                (
                    By.CSS_SELECTOR,
                    ".fulfillment-add-to-cart-button > div > div > button",
                )
            )
        ).click()
    try:
        return _purchase(driver, *args, **kwargs)
    except TimeoutException:
        # NOTE(review): the driver is left open here — presumably so a human
        # can still take over a visible window, but in headless mode nothing
        # ever closes it; confirm this is intended
        Colors.print(
            "3B Bot got stuck and nobody took over. Tracking will resume.",
            properties=["fail"],
        )
    except NoSuchWindowException:
        driver.quit()
        return False
import warnings
import psutil
from rich import get_console
from rich.columns import Columns
from rich.live import Live
from rich.spinner import Spinner
from rich.table import Table
def _pretty_warning(msg, *args, **kwargs):
    """Format a warning as a single colored line (drop-in for `warnings.formatwarning`).

    The extra positional/keyword arguments (category, filename, lineno, line)
    are accepted for signature compatibility but intentionally ignored.
    """
    return Colors.str(f"WARNING: {msg}\n", properties=["warning"])
# Route every warning through the colored formatter above
warnings.formatwarning = _pretty_warning
class Colors:
    """ANSI escape-code styling helpers for terminal output.

    Mutable-default lists were replaced with tuples so the defaults can never
    be accidentally mutated between calls; behavior is otherwise unchanged.
    """

    SUCCESS = "\033[92m"
    WARNING = "\033[93m"
    FAIL = "\033[91m"
    BLUE = "\033[94m"
    BOLD = "\033[1m"
    _ENDC = "\033[0m"

    @staticmethod
    def _props2str(props):
        """Concatenate the escape codes named in `props` (e.g. ["bold", "fail"])."""
        return "".join(getattr(Colors, prop.upper()) for prop in props)

    @staticmethod
    def str(string, properties=()):
        """Return `string` wrapped in the requested styles plus a reset code."""
        return Colors._props2str(properties) + string + Colors._ENDC

    @staticmethod
    def print(*args, properties=(), **kwargs):
        """Like built-in print, but renders the output with `properties` styles."""
        print(Colors._props2str(properties), end="")
        print_end = kwargs.pop("end", "\n")
        print(*args, **kwargs, end=Colors._ENDC + print_end)

    @staticmethod
    def warn(*args, **kwargs):
        """Thin wrapper around `warnings.warn` (formatted by `_pretty_warning`)."""
        warnings.warn(*args, **kwargs)
def print_table(columns, rows, justifications=("left", "center")):
    """Render `rows` under `columns` as a rich table with vertical centering.

    Args:
        columns: Column header strings.
        rows: Iterable of rows; every cell is str()-converted.
        justifications: Per-column justification names; the last entry is
            reused when there are more columns than justifications (the old
            code raised an IndexError in that case). The default is now a
            tuple so it cannot be mutated between calls.
    """
    table = Table(show_lines=True)
    for i, column in enumerate(columns):
        justify = justifications[min(i, len(justifications) - 1)]
        table.add_column(column, justify=justify)
    for row in rows:
        row = list(map(str, row))
        # Pad cells with leading newlines so each cell is vertically centered
        max_lines = max(string.count("\n") for string in row)
        vert_align_row = [
            "\n" * ((max_lines - string.count("\n")) // 2) + string for string in row
        ]
        table.add_row(*vert_align_row)
    with get_console() as console:
        console.print(table)
def count_cores():
    """Print the number of physical cores and, when it divides evenly, threads per core.

    `psutil.cpu_count(logical=False)` is documented to return None when the
    physical count cannot be determined; in that case only the core line is
    printed instead of raising a TypeError.
    """
    cores = psutil.cpu_count(logical=False)
    print("Cores:", cores)
    if not cores:
        # Physical core count unavailable — cannot derive threads per core
        return
    threads = psutil.cpu_count(logical=True) / cores
    int_threads = int(threads)
    if int_threads == threads:
        print("Threads per core:", int_threads)
def warnings_suppressed():
    """Return True when a blanket "ignore" filter for the base Warning class is active."""
    return any(
        entry[0] == "ignore" and entry[2] is Warning for entry in warnings.filters
    )
def loading(msg):
    """Return a transient live spinner labelled `msg` (use as a context manager)."""
    spinner_row = Columns([msg, Spinner("simpleDotsScrolling")])
    return Live(spinner_row, refresh_per_second=5, transient=True)
def yes_or_no(prompt):
    """Prompt until the user types an unambiguous prefix of "yes" or "no".

    Returns:
        True for any prefix of "yes", False for any prefix of "no".
    """
    while True:
        answer = input(prompt).strip().lower()
        if not answer:
            continue
        matches_yes = "yes".startswith(answer)
        matches_no = "no".startswith(answer)
        # Exactly one must match ("" would match both, but was filtered above)
        if matches_yes != matches_no:
            return matches_yes
        Colors.print(
            'Invalid response. Please enter either "y" or "n"', properties=["fail"]
        )
def validate_num(val, dtype):
    """Cast `val` to `dtype`, returning None when the cast is invalid or lossy.

    For `dtype=int`, values with a fractional part (e.g. 3.5) are rejected
    rather than silently truncated.
    """
    try:
        converted = dtype(val)
    except ValueError:
        return None
    # Reject int casts that would drop a fractional part
    if dtype is int and converted != float(val):
        return None
    return converted
import builtins
import os
import signal
import sys
import time
from multiprocessing import Pool
from multiprocessing.managers import BaseManager
from multiprocessing.pool import ThreadPool
from threading import Event, Lock
import psutil
from bs4 import BeautifulSoup
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import RequestException
from requests.packages.urllib3.util.retry import Retry
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from best_buy_bullet_bot.audio import sound_effects
from best_buy_bullet_bot.browser import purchase
from best_buy_bullet_bot.data import user_data
from best_buy_bullet_bot.data.setting_utils import (
DRIVER_NAMES,
SOUND_MODES,
MoneyManager,
get_settings,
)
from best_buy_bullet_bot.data.url_utils import QtyManager
from best_buy_bullet_bot.tracker.progress_bar import IndefeniteProgressBar
from best_buy_bullet_bot.utils import Colors
WINDOWS = sys.platform == "win32"
# Cache the settings (and the most frequently read keys) at import time
SETTINGS = get_settings()
SOUND_MODE = SETTINGS["sound mode"]
AUTO_CHECKOUT = SETTINGS["auto checkout"]
BROWSER_NAME = SETTINGS["browser"]
DRIVER_WRAPPER = DRIVER_NAMES[BROWSER_NAME]
NUM_THREADS = SETTINGS["threads"]
class TwoWayPause:
    """A pause/play switch built from two mirrored threading events.

    Exactly one of the two events is set at any time, so callers can block
    on either the paused state (`wait`) or the playing state (`wait_inverse`).
    Starts in the playing state.
    """

    def __init__(self):
        self.play = Event()
        self.play.set()
        self.pause = Event()

    def is_set(self):
        """Return True while paused."""
        return self.pause.is_set()

    def set(self):
        """Switch to the paused state."""
        self.play.clear()
        self.pause.set()

    def clear(self):
        """Switch back to the playing state."""
        self.pause.clear()
        self.play.set()

    def wait(self):
        """Block until paused."""
        self.pause.wait()

    def wait_inverse(self):
        """Block until playing."""
        self.play.wait()
# The following two classes are needed to use magic methods with `BaseManager`
class NoMagicLock:
    """Expose a threading.Lock's context-manager hooks as ordinary methods.

    `BaseManager` proxies cannot forward dunder methods, so `__enter__` and
    `__exit__` are re-exported under the plain names `enter`/`exit`.
    """

    def __init__(self):
        self.lock = Lock()

    def enter(self, *args, **kwargs):
        """Acquire the underlying lock (forwarded `__enter__`)."""
        self.lock.__enter__(*args, **kwargs)

    def exit(self, *args, **kwargs):
        """Release the underlying lock (forwarded `__exit__`)."""
        self.lock.__exit__(*args, **kwargs)
class NormalLock:
    """Re-attach context-manager magic to a `NoMagicLock` (or its manager proxy)."""

    def __init__(self, no_magic_lock):
        self.lock = no_magic_lock

    def __enter__(self, *args, **kwargs):
        """Delegate to the wrapped object's plain `enter` method."""
        self.lock.enter(*args, **kwargs)

    def __exit__(self, *args, **kwargs):
        """Delegate to the wrapped object's plain `exit` method."""
        self.lock.exit(*args, **kwargs)
# Register shared helpers with the multiprocessing manager so every tracker
# process operates on the same instances
BaseManager.register("ThreadLock", NoMagicLock)
BaseManager.register("QtyManager", QtyManager)
BaseManager.register("IndefeniteProgressBar", IndefeniteProgressBar)
BaseManager.register("PauseEvent", TwoWayPause)
BaseManager.register("MoneyManager", MoneyManager)
# Per-process stock flag: False when out of stock; set to True — or to a
# (driver, wait) tuple when auto checkout is on — once stock is found
STOCK = False
def track(
    title,
    url,
    qty,
    headless,
    login_cookies,
    password,
    cvv,
    thread_lock,
    paused,
    pbar,
    pred_price,
    money_manager,
):
    """Entry point for one product's tracker process.

    Routes `print` through the shared progress bar, spawns NUM_THREADS
    stock-checking threads (`run`), and then blocks in `await_checkout` to
    handle in-stock events on this thread.
    """
    builtins.print = pbar.print
    if not AUTO_CHECKOUT:
        # Request-based tracking sends the product page itself as the referer
        headers["referer"] = url
    # Re-wrap the managed lock so it can be used as a context manager again
    thread_lock = NormalLock(thread_lock)
    with ThreadPool(NUM_THREADS) as pool:
        pool.starmap_async(
            run,
            [
                [
                    title,
                    url,
                    qty,
                    login_cookies,
                    thread_lock,
                    paused,
                    pbar,
                    pred_price,
                    money_manager,
                    headers,
                ]
                for _ in range(NUM_THREADS)
            ],
        )
    # NOTE(review): this call sits *outside* the `with` block, so the pool's
    # context exit runs before it — confirm the worker threads are expected
    # to keep running at this point
    await_checkout(
        title,
        url,
        qty,
        headless,
        login_cookies,
        password,
        cvv,
        paused,
        pred_price,
        money_manager,
    )
def await_checkout(
    title,
    url,
    qty,
    headless,
    login_cookies,
    password,
    cvv,
    paused,
    pred_price,
    money_manager,
):
    """Block until a tracker thread flags stock, then announce it and check out.

    Runs forever on the tracker process's main thread; returns only once the
    requested quantity has been purchased or funds run out.
    """
    global STOCK
    while True:
        paused.wait()
        if STOCK:
            # Stop tracking entirely when the item is unaffordable or fully bought
            if not money_manager.check_funds(pred_price) or not qty.get():
                paused.clear()
                if SOUND_MODE == SOUND_MODES[2]:
                    sound_effects.stop()
                Colors.print(
                    f'All requested "{title}" were purchased.'
                    if money_manager.check_funds(pred_price)
                    else f"With only ${money_manager.get_funds():,.2f} you cannot afford {title}.",
                    "It will no longer be tracked to conserve resources.\n",
                    properties=["warning"],
                )
                return
            current_time = time.strftime("%H:%M:%S", time.localtime())
            Colors.print(
                f'\n{current_time} - "{title}" - {url}\n',
                properties=["bold"],
            )
            # Plays a sound
            if SOUND_MODE == SOUND_MODES[1]:
                sound_effects.play()
            elif SOUND_MODE == SOUND_MODES[2]:
                sound_effects.start()
            if AUTO_CHECKOUT:
                try:
                    # Keep buying until funds or the requested quantity run out
                    while money_manager.check_funds(pred_price) and qty.get():
                        if purchase(
                            url,
                            login_cookies,
                            headless,
                            *STOCK,  # `headless_driver` and `headless_wait`
                            title,
                            password,
                            cvv,
                            money_manager,
                        ):
                            qty.decrement()
                        else:
                            break
                except Exception as e:
                    Colors.print(
                        f"CHECKOUT ERROR: {e}",
                    )
            # Reset the flag and resume the paused trackers
            STOCK = False
            paused.clear()
        else:
            paused.wait_inverse()
def run(
    title,
    url,
    qty,
    login_cookies,
    thread_lock,
    paused,
    pbar,
    pred_price,
    money_manager,
    headers,
):
    """Stock-polling loop executed by each tracker thread.

    With auto checkout enabled a headless browser reloads the product page and
    inspects the add-to-cart button; otherwise plain GET requests are parsed
    with BeautifulSoup. When stock appears, sets the process-global STOCK flag
    and pauses every tracker so `await_checkout` can act.
    """
    global STOCK
    stop_tracker = False
    if AUTO_CHECKOUT:
        options = DRIVER_WRAPPER.options()
        options.page_load_strategy = "none"
        options.add_argument("--proxy-server='direct://'")
        options.add_argument("--proxy-bypass-list=*")
        options.add_argument("--headless")
        # Suppress "DevTools listening on ws:..." message
        if BROWSER_NAME == "chrome":
            options.add_experimental_option("excludeSwitches", ["enable-logging"])
        # Create the browser window
        driver = DRIVER_WRAPPER.driver(
            executable_path=DRIVER_WRAPPER.manager().install(), options=options
        )
        # Login to the browser by setting the cookies
        driver.get(url)
        for cookie in login_cookies:
            driver.add_cookie(cookie)
        wait = WebDriverWait(driver, 120)
        button_locator = EC.presence_of_element_located(
            (
                By.CSS_SELECTOR,
                ".fulfillment-add-to-cart-button > div > div > button",
            )
        )
        # Confirm that we have a stable connection and that Best Buy hasn't made any
        # changes to their website that would break our locator
        try:
            btn = wait.until(button_locator)
        except TimeoutException:
            Colors.print(
                f"Unable to connect to {title}. Closing tracker.",
                properties=["fail"],
            )
            stop_tracker = True
    else:
        session = Session()
        session.headers.update(headers)
        retry = Retry(
            connect=3,
            backoff_factor=0.25,
            status_forcelist=[429, 500, 502, 503, 504],
            # NOTE(review): urllib3 1.26 renamed `method_whitelist` to
            # `allowed_methods` (removed in urllib3 2.0) — confirm the pinned version
            method_whitelist=["HEAD", "GET", "OPTIONS"],
        )
        adapter = HTTPAdapter(max_retries=retry)
        session.mount("https://", adapter)
        session.mount("http://", adapter)
    connection_status = True
    available = False
    prev_available = False
    # Track item so long as we have sufficient funds and haven't bought the item too many times
    while not stop_tracker and (
        not AUTO_CHECKOUT or (money_manager.check_funds(pred_price) and qty.get())
    ):
        # Stop trackers to conserve resources during the auto checkout process
        if paused.is_set():
            paused.wait_inverse()
            continue
        if AUTO_CHECKOUT:
            driver.get(url)
            try:
                # Wait until old page has unloaded
                wait.until(EC.staleness_of(btn))
                if paused.is_set():
                    # Stop page load (page will reload when tracker restarts)
                    driver.execute_script("window.stop();")
                    continue
                # Wait until the add-to-cart button is present on the new page
                btn = wait.until(button_locator)
            # Inform the user if an error occurs while trying to locate the add-to-cart button
            except TimeoutException:
                if connection_status:
                    start_time = time.time()
                    Colors.print(
                        f"{title} tracker has lost connection.\n",
                        properties=["fail"],
                    )
                    connection_status = False
                continue
            # Check if it is an add-to-cart button
            available = btn.get_attribute("data-button-state") == "ADD_TO_CART"
        else:
            try:
                # Make a get request
                response = session.get(url, timeout=10)
                response.raise_for_status()
            except RequestException as e:
                # Inform the user if an error occurs while trying to make a get request
                if connection_status:
                    start_time = time.time()
                    Colors.print(
                        f"Unable to establish a connection to {title} remote endpoint.",
                        properties=["fail"],
                    )
                    print(e, "\n")
                    connection_status = False
                continue
            if paused.is_set():
                continue
            # Look for add-to-cart button
            soup = BeautifulSoup(response.text, "html.parser")
            available = (
                soup.find(
                    "button",
                    {"data-button-state": "ADD_TO_CART"},
                )
                is not None
            )
        pbar.update()
        # If we reconnected, inform the user
        if not connection_status:
            Colors.print(
                f"{title} tracker has successfully reconnected!",
                properties=["success"],
            )
            print(f"Downtime: {time.time()-start_time:.2f} seconds \n")
            connection_status = True
        # If the item is in stock
        if available:
            # Unlock the checkout process if it hasn't been already
            with thread_lock:
                if paused.is_set():
                    continue
                if AUTO_CHECKOUT:
                    if not STOCK:
                        STOCK = (driver, wait)
                else:
                    STOCK = True
                paused.set()
        # If item went back to being out of stock
        elif prev_available != available:
            if SOUND_MODE == SOUND_MODES[2]:
                sound_effects.stop()
        prev_available = available
    # Stop the auto checkout function
    if STOCK is not True:
        STOCK = True
        paused.set()
    if AUTO_CHECKOUT:
        driver.close()
    else:
        session.close()
def set_priority(high_priority):
    """Raise or lower this process's scheduling priority (used as a Pool initializer)."""
    p = psutil.Process(os.getpid())
    """
    EASTER EGG: Thank you for reading the source code!
    To run the bot with a higher priority level and achieve better performance complete the following.
    If using Firefox, complete the following before moving on to the next step:
        WINDOWS: Open a Command Prompt window with "Run as administrator" https://www.educative.io/edpresso/how-to-run-cmd-as-an-administrator
        MAC: Enter the command `su` in your terminal to gain root privileges. Beware your settings may be different in the root session, but you can always return to a normal session with the `exit` command.
    Then regardless of your browser:
        Run `3b-bot --fast` in your shell.
    """
    # Windows: REALTIME_PRIORITY_CLASS, HIGH_PRIORITY_CLASS, ABOVE_NORMAL_PRIORITY_CLASS, NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS, IDLE_PRIORITY_CLASS
    # MacOS: -20 is highest priority while 20 is lowest priority
    # Lower priorities are used here so other things can still be done on the computer while the bot is running
    priority = (
        (psutil.HIGH_PRIORITY_CLASS if WINDOWS else -10)
        if high_priority
        else (psutil.BELOW_NORMAL_PRIORITY_CLASS if WINDOWS else 10)
    )
    p.nice(priority)
def start(fast=False, headless=False, verify_account=False, skip_verification=False):
    """Top-level entry point: validate config, start browsers, launch trackers.

    Args:
        fast: Elevate permissions and raise process priority.
        headless: Run browsers without visible windows.
        verify_account: Force the account-verification checks this run.
        skip_verification: Skip account verification entirely.
    """
    from elevate import elevate
    from best_buy_bullet_bot.browser import browser_startup, get_user_agent
    from best_buy_bullet_bot.data import close_data, url_utils
    from best_buy_bullet_bot.utils import loading, warnings_suppressed, yes_or_no
    """
    EASTER EGG: Thank you for reading the source code!
    To run the bot with a higher priority level and achieve better performance complete the following.
    If using Firefox, complete the following before moving on to the next step:
        WINDOWS: Open a Command Prompt window with "Run as administrator" https://www.educative.io/edpresso/how-to-run-cmd-as-an-administrator
        MAC: Enter the command `su` in your terminal to gain root privileges. Beware your settings may be different in the root session, but you can always return to a normal session with the `exit` command.
    Then regardless of your browser:
        Run `3b-bot --fast` in your shell.
    """
    # If we don't have admin privileges try to elevate permissions
    if fast and hasattr(os, "getuid") and os.getuid() != 0:
        print("Elevating permissions to run in fast mode.")
        elevate(graphical=False)
    def kill_all(*args, **kwargs):
        # Forcefully terminate every process spawned by the bot
        if not WINDOWS:
            # Delete the temp file
            close_data()
        # Forcefully close everything
        os.system(
            f"taskkill /F /im {psutil.Process(os.getpid()).name()}"
        ) if WINDOWS else os.killpg(os.getpgid(os.getpid()), signal.SIGKILL)
    def clean_kill(*args, **kwargs):
        # Suppress error messages created as a result of termination
        # Selenium will often print long error messages during termination
        sys.stderr = open(os.devnull, "w")
        # Close the pbar and kill everything if the process pool has been created
        # (`pbar` is a closure variable: it only shows up in locals() once
        # `start` has actually assigned it below — subtle but intentional)
        if "pbar" in locals():
            pbar.close()
            print()
            kill_all()  # Inelegant but fast
        # Otherwise we can exit the traditional way
        else:
            print()
            sys.exit(0)
    # Use custom functions to exit properly
    for sig in [signal.SIGINT, signal.SIGBREAK if WINDOWS else signal.SIGQUIT]:
        signal.signal(sig, clean_kill)
    signal.signal(signal.SIGTERM, kill_all)
    print(
        """
 .d8888b.  888888b.   888888b.            888
d88P  Y88b 888  "88b  888  "88b           888
     .d88P 888  .88P  888  .88P           888
    8888"  8888888K.  8888888K.   .d88b.  888888
     "Y8b. 888  "Y88b 888  "Y88b d88""88b 888
888    888 888    888 888    888 888  888 888
Y88b  d88P 888   d88P 888   d88P Y88..88P Y88b.
 "Y8888P"  8888888P"  8888888P"   "Y88P"   "Y888
    """
    )
    # Check if a flag has been passed to suppress warnings
    suppress_warnings = warnings_suppressed()
    raw_urls = url_utils.get_url_data()
    if not len(raw_urls):
        print()
        Colors.warn("No URLs have been set to be tracked.")
        if not suppress_warnings:
            if yes_or_no("Would you like to set some URLs for tracking (y/n): "):
                while True:
                    url_utils.add_url()
                    if not yes_or_no("Do you want to add another url (y/n): "):
                        break
                raw_urls = url_utils.get_url_data()
    if not len(raw_urls):
        Colors.print(
            "Not enough URLs for tracking.",
            "Please add at least 1 URL.",
            "URLs can be added with `3b-bot add-url`.",
            properties=["fail", "bold"],
        )
        sys.exit(1)
    print("Tracking the following URLs.")
    url_utils.view_urls(AUTO_CHECKOUT)
    manager = BaseManager()
    manager.start()
    money_manager = manager.MoneyManager()
    if AUTO_CHECKOUT:
        print(f"Current funds: ${money_manager.get_funds():,.2f}")
    print()
    # Get URLs and a quantity object for each URL
    urls, qtys = [], []
    for url_group, raw_qty in raw_urls:
        int_qty = -1 if raw_qty == "inf" else raw_qty
        # Create a shared qty manager between URLs for URL groups
        qty = manager.QtyManager(int_qty) if len(url_group) > 1 else QtyManager(int_qty)
        urls += url_group
        qtys += [qty] * len(url_group)
    with loading("Checking URLs"):
        titles = list(url_utils.get_url_titles())
    # Fewer titles than URLs means some URLs failed validation
    if len(titles) < len(urls):
        sys.exit(1)
    elif len(titles) > len(urls):
        Colors.print(
            "Something went wrong!",
            "Please report the issue to https://github.com/LeonShams/BestBuyBulletBot/issues.",
            "Feel free to copy and paste the following when opening an issue.",
            properties=["fail", "bold"],
        )
        print(
            "ERROR ENCOUNTERED DURING EXECUTION: More titles than URLs!",
            f"Raw URLs: {raw_urls}",
            f"URLs: {urls}",
            f"Titles: {titles}",
            sep="\n",
        )
        sys.exit(1)
    if AUTO_CHECKOUT:
        email, password, cvv = user_data.get_creds()
        if not (email or password or cvv):
            Colors.warn(
                "\nCheckout credentials have not been set. Run `3b-bot set-creds` to add the necessary information."
            )
            if not suppress_warnings:
                if yes_or_no("Would you like to set your checkout credentials (y/n): "):
                    user_data.set_creds()
                    email, password, cvv = user_data.get_creds()
                print()
        login_cookies_list, predicted_prices = browser_startup(
            headless, email, password, urls, verify_account, skip_verification
        )
        headers = {}
    else:
        email, password, cvv = "", "", ""
        login_cookies_list, predicted_prices = ((None for _ in urls) for i in range(2))
        headers = {
            "accept": "*/*",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
            "sec-ch-ua-mobile": "?0",
            "sec-fetch-dest": "script",
            "sec-fetch-mode": "no-cors",
            "sec-fetch-site": "same-origin",
            "user-agent": get_user_agent(),
        }
    Colors.print("Availability tracking has started!", properties=["success"])
    if fast:
        Colors.print("Fast tracking enabled!", properties=["blue"])
    print()
    # Create remaining shared objects
    thread_lock = manager.ThreadLock()
    paused = manager.PauseEvent()
    pbar = manager.IndefeniteProgressBar()
    # Start process for each URL
    with Pool(len(urls), set_priority, [fast]) as p:
        p.starmap(
            track,
            [
                [
                    title,
                    url,
                    qty,
                    headless,
                    login_cookies,
                    password,
                    cvv,
                    thread_lock,
                    paused,
                    pbar,
                    pred_price,
                    money_manager,
                    headers,
                ]
                for title, url, qty, login_cookies, pred_price in zip(
                    titles, urls, qtys, login_cookies_list, predicted_prices
                )
            ],
        )
    pbar.close()
    print("\nAll processes have finished.")
import json
import os.path
from bs4 import BeautifulSoup
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import RequestException
from requests.packages.urllib3.util.retry import Retry
from best_buy_bullet_bot.browser import get_user_agent
from best_buy_bullet_bot.data import SHARED_DIR
from best_buy_bullet_bot.utils import (
Colors,
loading,
print_table,
validate_num,
yes_or_no,
)
# Path of the JSON file that stores tracked URLs and their quantities
URL_DIR = os.path.join(SHARED_DIR, "urls.json")
def _read():
    """Load the tracked-URL mapping ({url or newline-joined group: qty}) from disk."""
    with open(URL_DIR) as url_file:
        return json.load(url_file)
def _save(data):
    """Overwrite the URL file with `data` serialized as JSON."""
    with open(URL_DIR, "w+") as url_file:
        json.dump(data, url_file)
# Create an empty URL file on first run
if not os.path.isfile(URL_DIR):
    _save({})
def get_url_data():
    """Return the tracked URLs as [[list of URLs in the group, quantity], ...]."""
    return [[group.split("\n"), qty] for group, qty in _read().items()]
def view_urls(show_qty=True):
    """Print a table of the tracked URLs, optionally with their quantities.

    Args:
        show_qty: When True include a "Quantity" column.
    """
    data = _read().items()
    columns = ["URL"] + (["Quantity"] if show_qty else [])
    if show_qty:
        rows = [[url, qty] for url, qty in data]
    else:
        # Build one-element rows directly; the previous zip(*data) trick
        # raised an IndexError when no URLs were stored
        rows = [[url] for url, _ in data]
    print_table(columns, rows)
def get_qty():
    """Prompt for a purchase quantity; blank or "inf" means unlimited ("inf")."""
    while True:
        raw = input("Quantity (optional): ")
        if not raw.strip() or raw == "inf":
            return "inf"
        parsed = validate_num(raw, int)
        if parsed is not None and parsed >= 1:
            return parsed
        Colors.print(
            "Invalid input for quantity. Please enter an integer greater than or equal to 1.",
            properties=["fail"],
        )
class QtyManager:
    """Mutable purchase-quantity counter (shareable across processes via BaseManager).

    Callers encode "unlimited" as -1, which stays truthy when decremented.
    """

    def __init__(self, qty):
        self.qty = qty

    def get(self):
        """Return the remaining quantity."""
        return self.qty

    def decrement(self):
        """Consume one unit of the quantity."""
        self.qty = self.qty - 1
def add_url():
    """Interactively append one URL (with an optional quantity) to the URL file."""
    new_url = input("URL to add: ")
    if not new_url.strip():
        print("Aborted.")
        return
    urls = _read()
    qty = get_qty()
    urls[new_url] = qty
    _save(urls)
    Colors.print(
        f"Successfully added {new_url}{'' if qty == 'inf' else f' with a quantity of {qty}'}!",
        properties=["success"],
    )
def add_url_group():
    """Interactively add a group of URLs that share one purchase quantity.

    A blank first entry aborts without touching the URL file. (Previously the
    abort path only broke out of the loop, so an empty group was still saved
    under the "" key and the user was still prompted for a quantity.)
    """
    url_group = []
    i = 1
    while True:
        new_url = input("URL to add: ")
        if new_url.strip() == "":
            if i == 1:
                # Nothing entered yet: cancel the whole operation
                print("Aborted.")
                return
            else:
                # Blank entries mid-group are ignored; re-prompt
                continue
        url_group.append(new_url)
        if i >= 2 and not yes_or_no("Would you like to add another URL (y/n): "):
            break
        i += 1
    urls = _read()
    qty = get_qty()
    # Groups are stored as a single newline-joined key
    urls["\n".join(url_group)] = qty
    _save(urls)
    Colors.print(
        f"Successfully added a URL group with {len(url_group)} URLs{'' if qty == 'inf' else f' and a quantity of {qty} for the group'}!",
        properties=["success"],
    )
def remove_url():
    """Interactively delete one URL (or URL group) from the URL file by its table ID."""
    urls = _read()
    ids = range(1, len(urls) + 1)
    rows = list(zip(ids, urls.keys()))
    print_table(["ID", "Active URLs"], rows, justifications=["center", "left"])
    while True:
        raw_id = input("URL ID to remove: ").strip()
        if not raw_id:
            continue
        url_id = validate_num(raw_id, int)
        if url_id is not None and ids[0] <= url_id <= ids[-1]:
            break
        Colors.print(
            f"Please enter valid URL ID between {ids[0]}-{ids[-1]}. Do not enter the URL itself.",
            properties=["fail"],
        )
    # IDs are 1-based; map back to the dict's insertion order
    selected_url = list(urls.keys())[url_id - 1]
    del urls[selected_url]
    _save(urls)
    comma_separated_selection = selected_url.replace("\n", ", ")
    Colors.print(
        f"Successfully removed: {comma_separated_selection}", properties=["success"]
    )
def get_url_titles():
    """Yield the product title for every tracked URL (flattening URL groups).

    URLs that cannot be fetched or whose title element is missing are
    reported and skipped, so fewer titles than URLs may be produced. Also
    warns when an item is already in stock.
    """
    flattened_urls = [
        url
        for url_group, _ in get_url_data()
        for url in (url_group if type(url_group) is list else [url_group])
    ]
    session = Session()
    session.headers.update({"user-agent": get_user_agent()})
    retry = Retry(
        connect=3,
        backoff_factor=1,
        status_forcelist=[429, 500, 502, 503, 504],
        # NOTE(review): urllib3 1.26 renamed `method_whitelist` to
        # `allowed_methods` (removed in urllib3 2.0) — confirm the pinned version
        method_whitelist=["HEAD", "GET", "OPTIONS"],
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    for url in flattened_urls:
        try:
            response = session.get(url, timeout=10)
        except RequestException as e:
            Colors.print(e, properties=["fail", "bold"])
            continue
        soup = BeautifulSoup(response.text, "html.parser")
        raw_title = soup.find("div", class_="sku-title")
        if raw_title is None:
            Colors.print(
                f"Unable to find title for {url}.", properties=["fail", "bold"]
            )
            continue
        title = raw_title.get_text().strip()
        in_stock = soup.find(
            "button",
            {"data-button-state": "ADD_TO_CART"},
        )
        if in_stock:
            Colors.warn(f"{title} is already in stock.")
        yield title
    session.close()
def test_urls():
    """Fetch every tracked URL and report each product title that was found."""
    with loading("Testing URLs"):
        for title in get_url_titles():
            Colors.print(f"Confirmed {title}!", properties=["success"])
def clear_urls():
    """Delete every tracked URL by writing an empty mapping to disk."""
    _save({})
    Colors.print("Successfully removed all URLs!", properties=["success"])
import json
import logging
import os.path
import shutil
import sys
from time import sleep
from selenium.common.exceptions import WebDriverException
from selenium.webdriver import Chrome, Firefox
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from webdriver_manager.chrome import ChromeDriverManager
from webdriver_manager.firefox import GeckoDriverManager
from best_buy_bullet_bot.audio import sound_effects
from best_buy_bullet_bot.data import HEADLESS_WARNED, SHARED_DIR
from best_buy_bullet_bot.utils import (
Colors,
loading,
print_table,
validate_num,
warnings_suppressed,
yes_or_no,
)
# Path of the persisted settings file
SETTINGS_DIR = os.path.join(SHARED_DIR, "settings.json")
# Valid values for the "sound mode" setting
SOUND_MODES = ["disabled", "single", "repeat"]
# Values used when no settings file exists yet
DEFAULT_SETTINGS = {
    "funds": 1000,
    "tax": 0.095,
    "auto checkout": True,
    "account verification": True,
    "browser": "firefox",
    "sound mode": SOUND_MODES[2],
    "threads": 1,
}
def _save(data):
    """Write `data` to the settings file as JSON (creating or replacing it)."""
    with open(SETTINGS_DIR, "w+") as settings_file:
        json.dump(data, settings_file)
# Get the current settings
CURRENT_SETTINGS = DEFAULT_SETTINGS.copy()
if os.path.isfile(SETTINGS_DIR):
    save = False
    with open(SETTINGS_DIR) as f:
        # Overlay saved values onto the defaults, flagging stale keys
        for key, value in json.load(f).items():
            if key in CURRENT_SETTINGS:
                CURRENT_SETTINGS[key] = value
            elif not HEADLESS_WARNED():
                Colors.warn(
                    f"{key} is no longer supported and will be removed from your settings."
                )
                save = True
    if save and not HEADLESS_WARNED():
        # Offer to strip the unsupported keys once, then remember that we asked
        if not warnings_suppressed() and yes_or_no(
            "Delete unsupported settings from your settings file (y/n): "
        ):
            _save(CURRENT_SETTINGS)
        HEADLESS_WARNED.update(True)
else:
    # First run: persist the defaults
    _save(DEFAULT_SETTINGS)
def get_settings():
    """Return a shallow copy of the active settings (safe for callers to mutate)."""
    return dict(CURRENT_SETTINGS)
def update_setting(setting, new_val):
    """Change one setting in memory and persist the full settings to disk."""
    CURRENT_SETTINGS[setting] = new_val
    _save(CURRENT_SETTINGS)
def view_settings(show_default=False):
    """Print a table of settings (current ones by default, defaults on request)."""
    source = DEFAULT_SETTINGS if show_default else CURRENT_SETTINGS
    settings = source.copy()
    # Human-readable formatting for the numeric values
    settings["funds"] = f"${settings['funds']:,.2f}"
    settings["tax"] = f"{settings['tax'] * 100:.2f}%"
    settings["browser"] = settings["browser"].title()
    # Hidden property
    del settings["account verification"]
    rows = [[name.title(), value] for name, value in settings.items()]
    print_table(["Property", "Value"], rows)
def set_funds():
    """Prompt until the user enters a non-negative dollar amount, then save it."""
    while True:
        raw = input("Allotted money: $").replace("$", "")
        funds = validate_num(raw, float)
        if funds is not None and funds >= 0:
            break
        Colors.print(
            "Invalid input for funds. Please enter a positive number.",
            properties=["fail"],
        )
    update_setting("funds", funds)
    Colors.print(f"Successfully set funds to ${funds:,.2f}!", properties=["success"])
def set_tax():
    """Prompt until the user enters a non-negative tax percentage, then save it."""
    while True:
        raw = input("Sales tax rate (%): ").replace("%", "")
        tax = validate_num(raw, float)
        if tax is not None and tax >= 0:
            break
        Colors.print(
            "Invalid input. Please enter a positive percentage for tax.",
            properties=["fail"],
        )
    # Stored as a fraction (e.g. 9.5% -> 0.095).
    update_setting("tax", tax / 100)
    Colors.print(
        f"Successfully set the state sales tax rate to {tax:,.2f}%",
        properties=["success"],
    )
class MoneyManager:
    """Helpers for reading and spending the user's configured funds."""

    def get_funds(self):
        """Return the funds currently available."""
        return CURRENT_SETTINGS["funds"]

    def check_funds(self, cost):
        """Return True if *cost* can be covered by the available funds."""
        return cost <= CURRENT_SETTINGS["funds"]

    def make_purchase(self, cost):
        """Deduct *cost* from the available funds and persist the change."""
        remaining = CURRENT_SETTINGS["funds"] - cost
        update_setting("funds", remaining)
def _toggle_setting(setting_name):
    """Flip a boolean setting, persist it, and report its new state."""
    new_state = not CURRENT_SETTINGS[setting_name]
    update_setting(setting_name, new_state)
    state_word = "enabled" if new_state else "disabled"
    Colors.print(
        f"Successfully {state_word} {setting_name}!",
        properties=["success"],
    )
def toggle_auto_checkout():
    """Turn automatic checkout on or off."""
    _toggle_setting("auto checkout")
class DriverClassWrapper:
    """Bundle a selenium driver class with its manager and options classes.

    The classes are stored uninstantiated; callers construct them on demand.
    """

    def __init__(self, driver, manager, options):
        self.driver = driver
        self.manager = manager
        self.options = options
# Suppress log records of WARNING severity and below (driver managers are noisy).
logging.disable(logging.WARNING)

# Supported browsers mapped to their selenium driver machinery.
DRIVER_NAMES = {
    "chrome": DriverClassWrapper(Chrome, ChromeDriverManager, ChromeOptions),
    "firefox": DriverClassWrapper(Firefox, GeckoDriverManager, FirefoxOptions),
}
def is_installed(browser_name):
    """Check if browser is installed.

    Done by installing the drivers and trying to open the browser with selenium.
    If we can successfully open the browser then it is installed.
    """
    browser_name = browser_name.lower()
    if browser_name in DRIVER_NAMES:
        wrap = DRIVER_NAMES[browser_name]
    else:
        raise ValueError(
            f"3B Bot does not support {browser_name.title()}. Please pick either Chrome or Firefox."
        )

    # Install the drivers
    try:
        manager = wrap.manager()
        path = manager.install()
    except ValueError:
        # webdriver-manager raises ValueError when it can't resolve a driver.
        return False

    options = wrap.options()
    options.add_argument("--headless")
    # BUG FIX: key this off the browser being probed, not the configured
    # default browser. FirefoxOptions has no add_experimental_option, so the
    # old check (CURRENT_SETTINGS["browser"] == "chrome") crashed when
    # probing Firefox while Chrome was the saved setting.
    if browser_name == "chrome":
        options.add_experimental_option("excludeSwitches", ["enable-logging"])

    try:
        # Try to open a browser window
        driver = wrap.driver(executable_path=path, options=options)
        driver.quit()
        return True
    except (WebDriverException, ValueError):
        # Delete the drivers we just installed
        name = manager.driver.get_name()
        driver_dir = path.split(name)[0] + name
        if os.path.isdir(driver_dir):
            shutil.rmtree(driver_dir)
        return False
def change_browser():
    """Detect which supported browsers are installed and let the user pick one."""
    with loading("Detecting browsers"):
        available_browsers = [
            browser.title() for browser in DRIVER_NAMES.keys() if is_installed(browser)
        ]

    if not len(available_browsers):
        Colors.print(
            "No available browsers. Please install either Chrome or Firefox and try again.",
            "Chrome can be installed from https://www.google.com/chrome/.",
            # BUG FIX: user-facing typo "can ben installed" -> "can be installed".
            "Firefox can be installed from https://www.mozilla.org/en-US/firefox/new/.",
            sep="\n",
            properties=["fail", "bold"],
        )
        sys.exit()

    # Print available browsers
    print("\n • ".join(["Available Browsers:"] + available_browsers), "\n")

    while True:
        new_browser = input("Select a browser from the list above: ").strip().title()
        if new_browser == "":
            continue
        if new_browser not in available_browsers:
            Colors.print("Invalid selection. Try again.", properties=["fail"])
        else:
            break

    update_setting("browser", new_browser.lower())
    Colors.print(
        f"Successfully changed browser to {new_browser}!", properties=["success"]
    )
def test_sound(repetitions=3, print_info=True):
    """Play the notification sound *repetitions* times, blocking between plays."""
    if print_info:
        print("Playing sound...")
        sleep(0.15)
    for _ in range(repetitions):
        sound_effects.play(block=True)
def set_sound_mode():
    """Prompt for a sound mode, save it, and play a short demo of the choice."""
    while True:
        sound_mode = input(
            f"Select a sound mode ({SOUND_MODES[0]}/{SOUND_MODES[1]}/{SOUND_MODES[2]}): "
        )
        if sound_mode not in SOUND_MODES:
            Colors.print(
                f'Invalid input for sound mode. Please enter either "{SOUND_MODES[0]}" (no sound),'
                f' "{SOUND_MODES[1]}" (plays sound once after coming back in stock), or "{SOUND_MODES[2]}"'
                " (plays sound repeatedly until item is no longer in stock).",
                properties=["fail"],
            )
        else:
            break
    update_setting("sound mode", sound_mode)
    Colors.print(
        f"Successfully set sound mode to {sound_mode}!", properties=["success"]
    )

    # Give the user a short demonstration of the mode they just chose.
    sleep(1)
    print("\nThat sounds like...")
    sleep(0.75)
    # No sound
    if sound_mode == SOUND_MODES[0]:
        sleep(0.5)
        print("Nothing. Crazy, right?")
    # Play sound once
    elif sound_mode == SOUND_MODES[1]:
        test_sound(repetitions=1, print_info=False)
    # Play sound on repeat
    elif sound_mode == SOUND_MODES[2]:
        test_sound(print_info=False)
def set_threads():
    """Prompt for the number of tracking threads per URL (>= 1) and save it."""
    while True:
        raw = input("Threads (per URL): ")
        threads = validate_num(raw, int)
        if threads is not None and threads >= 1:
            break
        Colors.print(
            "Invalid number of threads. Please enter an integer greater than or equal to 1.",
            properties=["fail"],
        )
    update_setting("threads", threads)
    Colors.print(
        f"Now using {threads} threads to track each URL!", properties=["success"]
    )
def reset_settings():
    """Show the default settings and, on confirmation, restore them to disk."""
    print("Default settings:")
    view_settings(show_default=True)
    print()
    if not yes_or_no("Reset (y/n): "):
        print("Settings reset aborted.")
        return
    _save(DEFAULT_SETTINGS)
    Colors.print("Successfully reset settings!", properties=["success"])
import os
import sys
from getpass import getpass
from keyring.errors import PasswordDeleteError
from keyring.util import properties
from keyrings.cryptfile.cryptfile import CryptFileKeyring
from keyrings.cryptfile.file_base import FileBacked
from best_buy_bullet_bot.data import SHARED_DIR
from best_buy_bullet_bot.utils import Colors, yes_or_no
# Keyring entry names under which the Best Buy credentials are stored.
EMAIL_VAR = "BB_EMAIL"
PASS_VAR = "BB_PASS"
CVV_VAR = "BB_CVV"
@properties.NonDataProperty
def file_path(self):
    # Store the encrypted keyring file inside the bot's shared data directory
    # instead of keyring's default location.
    return os.path.join(SHARED_DIR, self.filename)


# Monkey-patch the storage location onto keyrings.cryptfile's FileBacked base.
FileBacked.file_path = file_path
# Identifier under which all of this bot's credentials are grouped.
SERVICE_ID = "3B_BOT"
# Encrypted, password-protected keyring backing store.
KR = CryptFileKeyring()
def set_access_pass(access_pass):
    """Pre-seed the keyring's master password so it won't prompt interactively."""
    KR._keyring_key = access_pass
def authenticate():
    """Prompt for the keyring master password, allowing up to three attempts.

    Accessing ``KR.keyring_key`` triggers the interactive prompt and raises
    ``ValueError`` on a wrong password.
    """
    attempts = 0
    while True:
        try:
            KR.keyring_key
            return
        except ValueError:
            # An empty entry is treated as the user bailing out.
            if str(KR._keyring_key).strip() == "":
                sys.exit()

            attempts += 1
            if attempts >= 3:
                print("Too many attempts, please try again later.")
                sys.exit()

            print("Sorry, try again.")
            # Clear the cached key so the next loop iteration re-prompts.
            KR._keyring_key = None
def _get_cred(name, default_value):
    """Fetch one credential from the keyring, falling back to *default_value*."""
    stored = KR.get_password(SERVICE_ID, name)
    if stored is None:
        return default_value
    return stored
def get_creds(default_value=""):
    """Return [email, password, cvv] after authenticating the user."""
    authenticate()
    creds = []
    for var in (EMAIL_VAR, PASS_VAR, CVV_VAR):
        creds.append(_get_cred(var, default_value))
    return creds
def _get_input(prompt):
    """Keep asking for a value until the user confirms it, then return it."""
    while True:
        entered = input(prompt)
        if yes_or_no("Continue (y/n): "):
            return entered
def set_creds():
    """Interactively store email, password, and CVV in the keyring."""
    authenticate()
    KR.set_password(SERVICE_ID, EMAIL_VAR, _get_input("Email: "))
    print()

    # Ask for the password twice until both entries match.
    while True:
        password = getpass("Best Buy password: ")
        if getpass("Confirm password: ") == password:
            break
        print("Passwords didn't match! Try again.")
    KR.set_password(SERVICE_ID, PASS_VAR, password)
    print()

    KR.set_password(SERVICE_ID, CVV_VAR, _get_input("CVV: "))
    Colors.print("Successfully updated credentials!", properties=["success"])
def print_creds():
    """Display the stored credentials, marking missing ones as EMPTY."""
    empty_marker = Colors.str("EMPTY", ["fail"])
    email, password, cvv = get_creds(empty_marker)
    for label, value in (("Email:", email), ("Password:", password), ("CVV:", cvv)):
        print(label, value)
def clear_creds():
    """Delete all stored credentials and optionally reset the master password."""
    for name in (EMAIL_VAR, PASS_VAR, CVV_VAR):
        try:
            KR.delete_password(SERVICE_ID, name)
        except PasswordDeleteError:
            # Nothing stored under this name -- fine.
            pass
    Colors.print("Credentials cleared!\n", properties=["success", "bold"])

    # Check if user wants to reset their password
    if yes_or_no("Would you like to reset your password (y/n): "):
        # Removing the backing file forces a fresh keyring; touching
        # keyring_key immediately prompts for the new master password.
        os.remove(KR.file_path)
        KR.keyring_key
import json
import os
import sys
import tempfile
from glob import glob
from best_buy_bullet_bot.utils import Colors
def _read_file():
    """Return the dict stored in the shared temp file."""
    FP.seek(0)
    parsed = json.load(FP)
    return parsed
def _write_file(content):
    """Overwrite the shared temp file with *content* serialized as JSON."""
    FP.seek(0)
    json.dump(content, FP)
    FP.truncate()
# This function is called before killing all processes to
# make sure the temp file is deleted
def close_data():
    """Close the shared temp file, deleting it when it won't self-delete.

    The main process's NamedTemporaryFile removes itself on close; a
    subprocess (num_temp_files == 1) opened the file manually, so it must
    remove the file explicitly here.
    """
    FP.close()
    if num_temp_files == 1 and os.path.isfile(FP.name):
        os.remove(FP.name)
# Windows doesn't allow for temporary files to be opened by a subprocess
# by default so we have to pass a flag to do so
def temp_opener(name, flag, mode=0o777):
    """File opener that adds O_TEMPORARY so subprocesses may open the file."""
    return os.open(name, flag | os.O_TEMPORARY, mode)
# TODO: Prevent directory from changing based on whether or not we are in root
# NOTE(review): mkdtemp() creates (and leaks) a fresh directory just to find
# the system temp dir -- consider tempfile.gettempdir() instead.
temp_dir = os.path.dirname(tempfile.mkdtemp())
prefix = "best_buy_bullet_bot_global_vars"
suffix = ".json"

# Temp files left behind by an already-running main process.
available_temp_files = glob(os.path.join(temp_dir, f"{prefix}*{suffix}"))
num_temp_files = len(available_temp_files)

if num_temp_files == 1:
    # Open the existing temp file
    FP = open(
        os.path.join(temp_dir, available_temp_files[0]),
        "r+",
        opener=temp_opener if sys.platform == "win32" else None,
    )
else:
    if num_temp_files > 1:
        # Too many temp files
        Colors.warn(
            f"Too many temporary files detected: {available_temp_files}. Deleting all temporary files."
        )
        for filename in available_temp_files:
            os.remove(os.path.join(temp_dir, filename))

    # Create a new temp file since we don't have any
    FP = tempfile.NamedTemporaryFile("r+", prefix=prefix, suffix=suffix, dir=temp_dir)
    _write_file({})
class ReferenceVar:
    """Points to a specific variable in the temp file.

    If a variable is changed by one process, all other processes holding a
    ReferenceVar for that name see the change the next time they access it
    (every read goes back to the shared temp file).
    """

    def __init__(self, var_name):
        # Key this object tracks inside the temp file.
        self.var_name = var_name

    def __new__(cls, var_name):
        # Return the value of the variable if it is a constant
        # else return this object
        self = super().__new__(cls)
        self.__init__(var_name)
        # Constants can never change, so hand back the plain value instead of
        # a live reference object.
        return self() if var_name.endswith("constant") else self

    def __call__(self):
        # Re-read the temp file on every access so cross-process updates are
        # always visible.
        return _read_file()[self.var_name]

    def update(self, new_value, constant=False):
        # Update the value of the variable in the temp file
        updated_dict = _read_file()
        new_name = self.var_name
        # A "_constant" suffix marks the stored value as immutable.
        new_name += "_constant" if constant else ""
        updated_dict.update({new_name: new_value})
        _write_file(updated_dict)
        return new_value if constant else self
if num_temp_files != 1:
    # We are in the main process. This is where variables are created.
    from keyring.util import platform_

    # We store data here so it doesn't get overwritten during an update
    shared_dir = os.path.join(
        os.path.dirname(platform_.data_root()), "best_buy_bullet_bot"
    )
    if not os.path.isdir(shared_dir):
        os.makedirs(shared_dir)

    # Save to temporary file
    HEADLESS_WARNED = ReferenceVar("HEADLESS_WARNED").update(False)
    SHARED_DIR = ReferenceVar("SHARED_DIR").update(shared_dir, constant=True)
else:
    # We are in a separate process. This is where variables are copied over from the main process.
    # Copy over all variables in the temp file to the locals dict so they can be imported.
    # At module level, locals() is the module namespace, so these names become importable.
    for var_name in _read_file():
        locals()[var_name.replace("_constant", "")] = ReferenceVar(var_name)
import glob
import os.path
from pathlib import Path
def calculate_indentation(string):
    """Return the leading whitespace of *string*."""
    stripped = string.strip()
    return string[: string.index(stripped)]
def starts_with(string, *substrings, begin=0, end=None):
    """Return the first of *substrings* that string[begin:end] starts with.

    Returns False when none of them match.
    """
    section = string[begin:end]
    for candidate in substrings:
        if section.startswith(candidate):
            return candidate
    return False
# Replacement text spliced over any comment containing "easter egg".
# The literal must stay exactly as-is, including the embedded quote fences.
MSG = '''"""
EASTER EGG: Thank you for reading the source code!
To run the bot with a higher priority level and achieve better performance complete the following.
If using Firefox, complete the following before moving on to the next step:
WINDOWS: Open a Command Prompt window with "Run as administrator" https://www.educative.io/edpresso/how-to-run-cmd-as-an-administrator
MAC: Enter the command `su` in your terminal to gain root privileges. Beware your settings may be different in the root session, but you can always return to a normal session with the `exit` command.
Then regardless of your browser:
Run `3b-bot --fast` in your shell.
"""'''

# Pre-split once so indent_msg() can re-indent the message line by line.
split_msg = MSG.split("\n")
def indent_msg(indent):
    """Return MSG with *indent* prepended to every non-blank line."""
    indented_lines = []
    for msg_line in split_msg:
        indented_lines.append(indent + msg_line if msg_line.strip() else "")
    return "\n".join(indented_lines)
if __name__ == "__main__":
    # Get path of best_buy_bullet_bot directory
    parent_dir = Path(__file__).parents[1]
    search_dir = os.path.join(parent_dir, "best_buy_bullet_bot")

    for filename in glob.iglob(os.path.join(search_dir, "**"), recursive=True):
        # Skip if not a python file
        if not filename.endswith(".py"):
            continue

        with open(filename, "r+") as f:
            code = f.read()
            lowercase_code = code.lower()

            # Skip file if no easter egg comments need to be added
            if "easter egg" not in lowercase_code:
                continue

            lines = code.split("\n")
            # NOTE(review): lower_lines is computed but never used below.
            lower_lines = lowercase_code.split("\n")

            for idx, line in enumerate(lines):
                line = line.lower().strip()

                # Skip line if the text "easter egg" is not in it
                if "easter egg" not in line:
                    continue

                # This variable means we will delete the following lines until we find a line that ends in the variable
                clear_multiline_string = starts_with(line, "'''", '"""')

                # If the multiline comment starts on the previous line
                if not clear_multiline_string and not starts_with(line, "'", '"', "#"):
                    previous_line = lines[idx - 1]
                    indent = calculate_indentation(previous_line)
                    previous_line = previous_line.strip()
                    clear_multiline_string = starts_with(previous_line, "'''", '"""')
                    if clear_multiline_string:
                        # Delete the previous line
                        lines.pop(idx - 1)
                        idx -= 1
                    else:
                        # Its not a comment, just the text "easter egg" laying around somewhere
                        continue
                else:
                    indent = calculate_indentation(lines[idx])

                if clear_multiline_string:
                    # Delete all subsequent lines until the comment ends
                    while not lines[idx + 1].strip().endswith(clear_multiline_string):
                        lines.pop(idx + 1)
                    lines.pop(idx + 1)

                # Replace the current line with the correct message
                lines.pop(idx)
                lines.insert(idx, indent_msg(indent))

            easter_egg_code = "\n".join(lines)

            # Update the file with the new code
            if easter_egg_code != code:
                f.seek(0)
                f.write(easter_egg_code)
                f.truncate()
About
======
3color Press is a flask based application intended to streamline making your own comic based website.
It is a static website generator that takes markdown formatted text files and turns them into new pages.
I am new to programming and I'm teaching myself python and flask as I build this project.
The project is under heavy development and features are being added as we work on them,
however a very functional core set of features is included
For more in-depth information on how to use it, check the doc pages. You can see a demo
site generated with version 0.1 of this tool at http://3color.noties.org
Features
* automatic handling of book pages, news pages and single pages
* easily add a page to the main menu
* easily add custom single pages
* News page to collect news feed page
* Support for showing a thumbnail of most recent comic in desired story line on every page
* command line tools for easy management
In Progress Features
* custom themeing support
* toggle-able theme elements
* improvement on handling in site menus
* admin interface
* better error checking
* much more?!
Installation
-------------
The package is available in pypi::
$ pip install 3color-Press
see :doc:`install`
Contribute
----------
If you're interested in contributing or checking out the source code you can take a look at:
* Issue Tracker: https://github.com/chipperdoodles/3color/issues
* Source Code: https://github.com/chipperdoodles/3color
Support
-------
If you're having problems or have some questions,
feel free to check out the github page: https://github.com/chipperdoodles/3color
License
--------
3color-Press is (c) Martin Knobel and contributors and is licensed under a BSD license
see :doc:`license`
| 3color-Press | /3color-Press-0.2.1.tar.gz/3color-Press-0.2.1/README.txt | README.txt |
import click
import subprocess
from datetime import date
from .application import create_site, instfolder
from .tools import publish, misc
from .models import PagesCreator, PageCreator
from .site import coolviews
up = click.UNPROCESSED
@click.group()
def cli():
    """ 3color Press command line tool

    This provides command line tools to manage your 3color site.
    Simply pass a command after 3color to get something done!
    The project folder is the 3color-Press folder in your home directory.

    Commands available:

    \b
    build       Builds your website as static html to your build folder.
                The default build folder is 'build' in your project folder
    \b
    compress    Archives your build directory into a tar.gz file.
                Does the same as if your PUB_METHOD is local
    \b
    publish     Pushes your website to your remote server. It will use configured
                PUB_METHOD by default unless you supply --pubmethod option.
                Using --pubmethod will override your default method and must be one
                of these options: sftp, rsync, local, or git.
    \b
                example: 3color publish --pubmethod rysnc
    \b
    all         Builds and then publishes your website.
                This is the same as running '3color build' and then '3color publish'
    \b
    open        Opens the project folder in your file browser
    \b
    atom        If you have the Atom Editor installed,
                this will call on atom to open your project folder in atom
    \b
    newpage     Creates a new page (.md file) based on your inputs.
                You can pass the option --batch in order to created a batch of pages
                with auto page numbering and file naming.
    \b
                example: 3color newpage --batch
    \b
    run         Runs your website locally on port 5000 and opens http://localhost:5000
                in your default web browser. Use this command in order to see
                what your website will look like before you build it. Useful for
                Theme building. press ctrl+c to halt the live server.
    """
    pass
@cli.command(name='all')
def build_push():
    """Builds and then publishes your website."""
    click.echo('building to build folder')
    app = create_site()
    coolviews.chill()
    click.echo('publishing with default pub method')
    # BUG FIX: publish.publish was referenced without parentheses, so the
    # "all" command built the site but never actually published it.
    publish.publish()
@cli.command()
def build():
    """Builds website into static files"""
    click.echo('building')
    # create_site() configures the app (and, presumably, the freezer) before
    # chill() writes the static output -- verify against application.create_site.
    app = create_site()
    coolviews.chill()
@cli.command()
def compress():
    """Compress build folder into a tar.gz file"""
    click.echo('compressing')
    # BUG FIX: `execute` (a fabric helper) is not imported in this module, so
    # the old call `execute(publish.archive)` raised NameError. archive() is a
    # plain local function and needs no fabric wrapper.
    publish.archive()
@cli.command(name='publish')
@click.option('--pubmethod', type=click.Choice(['sftp', 'rsync', 'local', 'git' ]))
def push_site(pubmethod):
    """Publish site to remote server"""
    click.echo('publishing')
    if pubmethod is None:
        # No override given -- fall back to the configured PUB_METHOD default.
        publish.publish()
    else:
        publish.publish(pubmethod)
# FIXME launches browser windows
@cli.command()
def run():
    """Run website locally in debug mode"""
    # NOTE(review): the browser is opened *before* app.run() starts serving;
    # this usually works because the browser takes longer to start, but it is
    # a race -- confirm before relying on it.
    click.launch('http://localhost:5000/')
    app = create_site()
    app.run()
@cli.command(name='open')
def open_file():
    """Open project folder"""
    # click.launch opens the path with the platform's default file browser.
    click.launch(instfolder)
@cli.command()
@click.option('--batch', is_flag=True, help='For making more than one new page')
@click.option('--pagetype', prompt='Page type to be created',
              type=click.Choice(['book', 'news', 'single']), default='book')
# TODO: Create pagetype specific forms
def newpage(batch, pagetype):
    """Create a new page"""
    # Target directory depends on the page type (book/news/single).
    path = misc.page_dir(pagetype)
    if batch:
        # Batch mode: auto-number pages, prompting only for shared metadata.
        pamount = click.prompt('Amount of new pages to make', type=int)
        lname = click.prompt('The title of the Book', default='', type=up)
        sname = click.prompt('The shortname of your book (used for filenames)',
                             default='', type=up)
        ptype = pagetype
        data = dict(
            longname=lname,
            shortname=sname,
            pagetype=ptype,
            path=path,
            page_amount=pamount
        )
        thing = PagesCreator(**data)
        thing.write_page()
    else:
        # Single-page mode: prompt for every header field individually.
        lname = click.prompt('The title of the Book', default='', type=up)
        sname = click.prompt('The shortname of your book (used for filenames)',
                             default='', type=up)
        ptype = pagetype
        # Page title defaults to today's date.
        ptitle = click.prompt('The title of the page',
                              default='{:%Y-%m-%d}'.format(date.today()))
        pnumber = click.prompt('The number of the page', type=int)
        chptr = click.prompt('The chapter number', type=int)
        img = click.prompt('The name of the image file of your comic page',
                           default=sname+'_'+str(pnumber)+'.png', type=up)
        menu = click.prompt('True or False for link in main menu',
                            type=bool, default=False)
        data = {
            "longname": lname,
            "shortname": sname,
            "pagetype": ptype,
            "pagetitle": ptitle,
            "pagenumber": pnumber,
            "chapter": chptr,
            "image": img,
            "menu": menu,
            "path": path
        }
        thing = PageCreator(**data)
        thing.write_page()
@cli.command()
def atom():
    """ Open project folder with atom editor"""
    try:
        if misc.system == 'Windows':
            subprocess.check_call(["atom", instfolder], shell=True)
        else:
            subprocess.check_call(["atom", instfolder])
    except OSError as e:
        # BUG FIX: the old handler called os.strerror(e) without importing os
        # (NameError) and passed an exception where an errno int is expected.
        print(e)
        print("The atom editor command line tool not installed")
# @cli.command(name='setup')
# def make_instance():
# """Create your Project folder and copy over default config file"""
# misc.make_home() | 3color-Press | /3color-Press-0.2.1.tar.gz/3color-Press-0.2.1/threecolor/manager.py | manager.py |
import os
import yaml
from . import __version__
from datetime import date
class PageHeader(object):
    """
    Class that handles page header data. The page header is a yaml header
    at the beginning of the markdown file that will be turned into a page.

    All keyword arguments (longname, shortname, pagetype, pagetitle,
    pagenumber, chapter, image, menu, pub, mod, path, ...) become instance
    attributes.
    """

    def __init__(self, **kwargs):
        # Every provided keyword becomes an instance attribute.
        # (Dead commented-out per-attribute assignments removed.)
        self.__dict__.update(kwargs)

    @property
    def header(self):
        """Return the page metadata as a dict ready for YAML serialization."""
        return {
            "title": self.pagetitle,
            "published": self.pub,
            "modified": self.mod,
            "page_type": self.pagetype,
            "book": {
                'title': self.longname,
                'chapter': self.chapter,
                'page_number': self.pagenumber,
                'image': self.image
            },
            "menu": self.menu,
            "version": __version__
        }

    def write_page(self):
        """ Writes the dict from header function into a file. This is our page metadata information"""
        name = os.path.join(self.path, self.shortname+'_'+str(self.pagenumber)+'.md')
        # NOTE(review): the file is opened in *binary* append mode while
        # yaml.dump produces text -- confirm this matches the targeted
        # Python version before changing.
        with open(name, "ab") as f:
            yaml.dump(self.header, f)

    def dump(self):
        """Return a printable preview (filename + YAML) used for testing."""
        name = os.path.join(self.path, self.shortname+'_'+str(self.pagenumber)+'.md')
        info = yaml.safe_dump(self.header)
        return name+'\n'+info
class PagesCreator(PageHeader):
    """Subclass of PageHeader for making batch yaml headers and markdown files"""

    def __init__(self, **kwargs):
        super(PagesCreator, self).__init__(**kwargs)
        # Page numbering starts at index + 1.
        self.index = 0
        # Batch-created pages are all stamped with today's date.
        self.pub = '{:%Y-%m-%d}'.format(date.today())
        self.mod = '{:%Y-%m-%d}'.format(date.today())

    def header(self, n):
        """
        Overrides PageHeader's header function to one needed for batch creation
        """
        # NOTE(review): this replaces the parent's ``header`` *property* with a
        # plain method taking the page number -- intentional, but easy to trip on.
        return {
            "title": '',
            "published": self.pub,
            "modified": self.mod,
            "page_type": self.pagetype,
            "book": {'title': self.longname, 'chapter': '', 'page_number': n, 'image': ''},
            "menu": False,
            "version": __version__
        }

    def write_page(self):
        """ Writes the dict from header function into a file. This is our page metadata information"""
        # Creates shortname_1.md .. shortname_<page_amount>.md, numbered from index+1.
        for x in range(1, self.page_amount+1):
            name = os.path.join(self.path, self.shortname+'_'+str(self.index+x)+'.md')
            number = self.index+x
            with open(name, "ab") as f:
                yaml.safe_dump(self.header(number), f)
class PageCreator(PageHeader):
    """PageHeader for a single interactively-specified page.

    Stamps today's date as both the published and modified dates.
    """

    def __init__(self, **kwargs):
        super(PageCreator, self).__init__(**kwargs)
        self.pub = '{:%Y-%m-%d}'.format(date.today())
        self.mod = '{:%Y-%m-%d}'.format(date.today())
import os
from ..configs import config
from flask import abort, current_app, Blueprint, render_template, send_from_directory
from flask_flatpages import FlatPages
from flask_frozen import Freezer
# Created unbound here; presumably bound to the Flask app elsewhere
# (create_site) -- verify against the application factory.
freezer = Freezer()
pages = FlatPages()

# User configuration (template/static folder locations, etc.).
cfg = config.make_usr_cfg()

site = Blueprint('site', __name__,
                 url_prefix='',
                 template_folder=cfg['TEMPLATES'],
                 static_folder=cfg['STATIC'],
                 static_url_path='/static/site'
                 )
@site.context_processor
def page_types():
    """Inject the page collections every template needs into its context.

    BUG FIX: these used to be one-shot generators; latest_comic() consumed
    ``book_page`` while building the thumbnail, so templates iterating
    ``book_page`` afterwards saw it (partially) exhausted. Materializing
    lists makes every injected value safely re-iterable.
    """
    menu_pages = [p for p in pages if (p['menu'])]
    book_page = [p for p in pages if 'book' == p['page_type']]
    news_page = [p for p in pages if 'news' == p['page_type']]  # FIXME: uses same name as route function below
    thumb_nail = latest_comic(book_page, current_app.config['THUMB_STORY'], 1)
    book_list = [p['page_type'] for p in pages]  # FIXME: uses same name as book_list function below
    return {
        "book_page": book_page,
        "menu_pages": menu_pages,
        "news_page": news_page,
        "thumb_nail": thumb_nail,
        "book_list": book_list,
        "pages": pages
    }
def total_pages(pages, book):
    """Return how many pages belong to *book* (used for page navigation).

    NOTE(review): callers pass a title string while ``p.meta['book']``
    appears to be a dict elsewhere in this module -- verify this comparison
    actually matches anything.
    """
    return sum(1 for p in pages if p.meta['book'] == book)
def latest_comic(pages, book, limit=None):
    """Return up to *limit* published 'book'-type pages of *book*, newest first."""
    matching = [
        p for p in pages
        if p['page_type'] == 'book' and p['book']['title'] == book
    ]
    matching.sort(key=lambda p: p.meta['published'], reverse=True)
    return matching[:limit]
def page_feed(pages, limit=None):
    """Return up to *limit* 'book'-type pages across all books, newest first."""
    feed = [p for p in pages if p['page_type'] == 'book']
    feed.sort(key=lambda p: p.meta['published'], reverse=True)
    return feed[:limit]
def book_list():
    """Return the titles of all books (pages that are chapter 1, page 1)."""
    return [
        p['book']['title']
        for p in pages
        if p['book']['chapter'] == 1 and p['book']['page_number'] == 1
    ]
@site.route('/images/<name>')
def images(name):
    """Serve a static image file from the instance image directory."""
    path = current_app.config['IMAGE_DIR']
    # Reject path-traversal attempts before touching the filesystem.
    if '..' in name or name.startswith('/'):
        abort(404)
    return send_from_directory(path, name)
@freezer.register_generator
def images_url_generator():
    """Yield /images/<file> URLs so instance images are frozen into the build."""
    for entry in os.listdir(current_app.config['IMAGE_DIR']):
        yield '/images/' + entry
@site.route('/')
def index():
    """Home page showing the single most recent page of the main story."""
    front_page = latest_comic(pages, current_app.config['MAIN_STORY'], 1)
    return render_template('home.html', front_page=front_page)
@site.route('/books/')
def books():
    """List every book via its first page (chapter 1, page 1)."""
    first_page = (
        p for p in pages
        if p['book']['chapter'] == 1 and p['book']['page_number'] == 1
    )
    return render_template('books.html', first_page=first_page)
@site.route('/news/')
def news():
    """Render the news overview page."""
    return render_template('news.html')
# @site.route('/atom.xml')
# atom feed, only works with a patch to werkzeug/contrip/atom.py file will look into more
# https://github.com/mitsuhiko/werkzeug/issues/695
# def atom_feed():
# feed = AtomFeed('Feed for '+current_app.config['SITE_NAME'],
# feed_url=current_app.config['DOMAIN']+url_for('.atom_feed'),
# url=current_app.config['DOMAIN'])
# # comic_feed = (p for p in pages if p.meta['page_type'] != 'single_page')
# comic_feed = page_feed(pages, 10)
# for p in comic_feed:
# feed.add(p.meta['title'],
# content_type='html',
# url=current_app.config['DOMAIN']+p.path+'.html',
# updated=p.meta['published'],
# summary=p.body)
# return feed.get_response()
@site.route('/<name>.html')
def single_page(name):
    """Render a custom single page (e.g. about, FAQ) from PAGE_DIR."""
    page_path = '{}/{}'.format(current_app.config['PAGE_DIR'], name)
    page = pages.get_or_404(page_path)
    return render_template('page.html', page=page)
@site.route('/news/<name>.html')
def news_page(name):
    """Render a single news page from NEWS_DIR."""
    page_path = '{}/{}'.format(current_app.config['NEWS_DIR'], name)
    page = pages.get_or_404(page_path)
    return render_template('page.html', page=page)
@site.route('/<book>/c<int:chapter>/p<int:number>/<name>.html')
def comic_page(book, chapter, number, name):
    """Render one comic page plus first/previous/next/last navigation links.

    BUG FIX: the navigation generators used to match on page_number alone, so
    with more than one book the previous/next/last links could point into a
    different book. Every lookup now also filters on the current book title.
    """
    path = '{}/{}'.format(current_app.config['BOOK_DIR'], name)
    p = pages.get_or_404(path)
    current_book = p['book']['title']
    current_chapter = p.meta['book']['chapter']
    t_pages = total_pages(pages, p['book']['title'])
    minus = p['book']['page_number'] - 1
    plus = p['book']['page_number'] + 1
    first_page = (q for q in pages
                  if q['book']['page_number'] == 1 and q['book']['title'] == current_book)
    last_page = (q for q in pages
                 if q['book']['page_number'] == t_pages and q['book']['title'] == current_book)
    previous_page = (q for q in pages
                     if q['book']['page_number'] == minus and q['book']['title'] == current_book)
    next_page = (q for q in pages
                 if q['book']['page_number'] == plus and q['book']['title'] == current_book)
    return render_template(
        'comic.html',
        current_book=current_book,
        current_chapter=current_chapter,
        p=p,
        previous_page=previous_page,
        next_page=next_page,
        t_pages=t_pages,
        last_page=last_page,
        first_page=first_page
    )
def chill():
    """Write the whole site out as static files via Frozen-Flask."""
    freezer.freeze()
import click
from ..models import PagesCreator, PageCreator
from ..application import page_dir
from ..manager import cli
# TODO currently file is not used
@cli.command()
@click.option('--batch', is_flag=True, help='For making more than one new page')
@click.option('--pagetype', prompt='Page type to be created', type=click.Choice(['book', 'news', 'single']))
def newpage(batch, pagetype):
    """Creates a new page, prompting you for information"""
    # NOTE(review): this duplicates manager.newpage and, per the TODO above,
    # is currently unused. Unlike manager.newpage, single mode only echoes a
    # preview (dump) instead of writing the page.
    path = page_dir(pagetype)
    if batch:
        # Batch mode: auto-number pages, prompting only for shared metadata.
        pamount = click.prompt('Amount of new pages to make', type=int)
        lname = click.prompt('The title of the Book', default=None)
        sname = click.prompt('The shortname of your book (used for filenames)', default='')
        ptype = pagetype
        data = {
            "longname": lname,
            "shortname": sname,
            "pagetype": ptype,
            "path": path,
            "page_amount": pamount
        }
        thing = PagesCreator(**data)
        thing.write_page()
    else:
        # Single-page mode: prompt for every header field individually.
        lname = click.prompt('The title of the Book', default='')
        sname = click.prompt('The shortname of your book (used for filenames)', default='')
        ptype = pagetype
        ptitle = click.prompt('The title of the page', default=None)
        pnumber = click.prompt('The number of the page', type=int, default=None)
        chptr = click.prompt('The chapter number', type=int, default=None)
        img = click.prompt('The name of the image file of your comic page', default=sname+'_'+str(pnumber)+'.png')
        menu = click.prompt('True or False if you want to show up in main menu', type=bool, default=False)
        data = {
            "longname": lname,
            "shortname": sname,
            "pagetype": ptype,
            "pagetitle": ptitle,
            "pagenumber": pnumber,
            "chapter": chptr,
            "image": img,
            "menu": menu,
            "path": path
        }
        thing = PageCreator(**data)
        click.echo(thing.dump())
import os
import subprocess
from ..application import create_site
from ..configs import config
# TODO: make fabric optional
from fabric.api import *
from fabric.api import execute
from fabric.contrib.project import rsync_project
from fabric.contrib.files import exists
from shutil import make_archive
# Project/instance folder location and user configuration.
instfolder = config.instfolder
cfg = config.make_usr_cfg()

# configure user and hostname for remote server
env.user = cfg['USER_NAME']
env.hosts = cfg['REMOTE_SERVER']

# Default publish method and the local build output directory.
pub_method = cfg['PUB_METHOD']
build_dir = cfg['FREEZER_DESTINATION']
def archive():
    """Create a local 3colorSite.tar.gz of the build folder in the project dir."""
    target = os.path.join(instfolder, '3colorSite')
    make_archive(target, 'gztar', build_dir)
def rsync():
    """Deploy the build folder to the remote '3colorsite/' directory via rsync.

    Uses fabric's rsync wrapper with delete=True, so remote files that are
    missing from the local build folder are removed.
    """
    local_build = os.path.join(instfolder, build_dir + '/')
    rsync_project('3colorsite/', local_build, delete=True)
# TODO test functionality
def git_deploy():
    """
    Deploy the built site through git.

    Changes into the build directory, stages all files, commits with the
    message 'updated', pushes, and finally returns to the original
    project directory.
    """
    project_dir = os.getcwd()
    build_path = os.path.join(instfolder, build_dir)
    os.chdir(build_path)
    commands = (
        ['git', 'add', '-A'],
        ['git', 'commit', '-a', '-m', 'updated'],
        ['git', 'push'],
    )
    for command in commands:
        subprocess.call(command)
    os.chdir(project_dir)
# TODO: make nicer, add non-fabric plain FTP support
def sftp():
    """
    Archive the built site, upload it via fabric sftp and unpack it on the server.

    The remote folder is ~/3colorSite; if it already exists its contents are
    deleted first, ensuring pages removed locally do not linger remotely.
    The local tarball is deleted after the upload.
    """
    make_archive(os.path.join(instfolder, '3colorSite'), 'gztar', build_dir)
    tarfile = os.path.join(instfolder, '3colorSite.tar.gz')
    # Both branches previously duplicated the upload/untar sequence; only the
    # prepare-the-remote-directory step actually differs.
    if exists('~/3colorSite'):
        # Clear stale remote content before uploading the fresh archive.
        run('rm -rf ~/3colorSite/*')
    else:
        run('mkdir ~/3colorSite')
    put(tarfile, '~/3colorSite/3colorSite.tar.gz')
    with cd('~/3colorSite/'):
        run('tar xzf 3colorSite.tar.gz')
        run('rm -rf 3colorSite.tar.gz')
    os.remove(tarfile)
def publish(pubmethod=pub_method):
    """Main entry point: publish the site using the configured method."""
    dispatch = {
        'sftp': lambda: execute(sftp),
        'rsync': lambda: execute(rsync),
        'git': git_deploy,
        'local': archive,
    }
    action = dispatch.get(pubmethod)
    if action is not None:
        action()
    else:
        print("You did not configure your publish method")
# 3d-connectX-env
[![BuildStatus][build-status]][ci-server]
[![PackageVersion][pypi-version]][pypi-home]
[![Stable][pypi-status]][pypi-home]
[![Format][pypi-format]][pypi-home]
[![License][pypi-license]](LICENSE)
![pattern1.gif](assets/pattern1.gif)
[build-status]: https://travis-ci.com/youngeek-0410/3d-connectX-env.svg?branch=main
[ci-server]: https://travis-ci.com/youngeek-0410/3d-connectX-env
[pypi-version]: https://badge.fury.io/py/3d-connectX-env.svg
[pypi-license]: https://img.shields.io/github/license/youngeek-0410/3d-connectX-env
[pypi-status]: https://img.shields.io/pypi/status/3d-connectX-env.svg
[pypi-format]: https://img.shields.io/pypi/format/3d-connectX-env.svg
[pypi-home]: https://badge.fury.io/py/3d-connectX-env
[python-version]: https://img.shields.io/pypi/pyversions/3d-connectX-env.svg
[python-home]: https://python.org
3D connectX repository, developed for the [OpenAI Gym](https://github.com/openai/gym) format.
## Installation
The preferred installation of `3d-connectX-env` is from `pip`:
```shell
pip install 3d-connectX-env
```
## Usage
### Python
```python
import gym_3d_connectX
import gym
env = gym.make('3d-connectX-v0')
env.reset()
env.utils.win_reward = 100
env.utils.draw_penalty = 50
env.utils.lose_penalty = 100
env.utils.could_locate_reward = 10
env.utils.couldnt_locate_penalty = 10
env.utils.time_penalty = 1
env.player = 1
actions = [0, 0, 1, 1, 2, 2, 4, 4, 0, 0, 1, 1, 2, 2, 0, 3]
for action in actions:
obs, reward, done, info = env.step(action)
env.render(mode="plot")
```
## Environments
The environments only send reward-able game-play frames to agents;
no cut-scenes, loading screens, etc. are sent to an agent,
nor can an agent perform actions during these instances.
Environment: `3d-connectX-v0`
### Factor at initialization.
| Key | Type | Description
|:------------------------|:---------|:------------------------------------------------------|
| `num_grid`               | `int`    | Length of a side.
| `num_win_seq` | `int` | The number of sequence necessary for winning.
| `win_reward` | `float` | The reward agent gets when win the game.
| `draw_penalty`           | `float`  | The penalty the agent gets when it draws the game.
| `lose_penalty`           | `float`  | The penalty the agent gets when it loses the game.
| `couldnt_locate_penalty`| `float`  | The penalty the agent gets when it chooses a location where a stone cannot be placed.
| `could_locate_reward`   | `float`  | The additional reward the agent gets for successfully placing a stone.
| `time_penalty`          | `float`  | The penalty the agent gets as timesteps elapse.
| `first_player` | `int` | Define which is the first player.
## Step
Info about the rewards and info returned by the `step` method.
| Key | Type | Description
|:-------------------|:---------|:------------------------------------------------------|
| `turn` | `int` | The number of the player at this step
| `winner` | `int` | Value of the player on the winning side
| `is_couldnt_locate`| `bool`   | Whether, in this step, the player chose a location where a stone could not be placed.
| 3d-connectX-env | /3d-connectX-env-1.0.1.tar.gz/3d-connectX-env-1.0.1/README.md | README.md |
from typing import Literal
class Writer:
    """Accumulates binary data into ``self.buffer``, honouring the chosen endianness."""

    def __init__(self, endian: Literal['big', 'little'] = 'big'):
        self.endian = endian  # byte order used for every multi-byte value
        self.buffer = b''

    def write(self, data: bytes) -> None:
        """Append raw bytes as-is."""
        self.buffer += data

    def writeUInteger(self, integer: int, length: int = 1) -> None:
        """Append an unsigned integer encoded on ``length`` bytes."""
        self.buffer += integer.to_bytes(length, self.endian, signed=False)

    def writeInteger(self, integer: int, length: int = 1) -> None:
        """Append a signed (two's-complement) integer encoded on ``length`` bytes."""
        self.buffer += integer.to_bytes(length, self.endian, signed=True)

    def writeUInt64(self, integer: int) -> None:
        self.writeUInteger(integer, 8)

    def writeInt64(self, integer: int) -> None:
        self.writeInteger(integer, 8)

    def writeFloat(self, floating: float) -> None:
        """Append an IEEE-754 single-precision float.

        Replaces a hand-rolled encoder that used the float64 subnormal
        threshold (2 ** -1022) instead of the float32 one (2 ** -126) and
        therefore mis-encoded (or crashed on) very small magnitudes.
        """
        import struct
        self.buffer += struct.pack('>f' if self.endian == 'big' else '<f', floating)

    def writeUInt32(self, integer: int) -> None:
        self.writeUInteger(integer, 4)

    def writeInt32(self, integer: int) -> None:
        self.writeInteger(integer, 4)

    def writeNUInt16(self, integer: float) -> None:
        # Normalized [0, 1] float stored as an unsigned 16-bit integer.
        self.writeUInt16(round(integer * 65535))

    def writeUInt16(self, integer: int) -> None:
        self.writeUInteger(integer, 2)

    def writeNInt16(self, integer: float) -> None:
        # Normalized float stored as a signed 16-bit integer.
        # NOTE(review): scale 32512 (not 32767) mirrors Reader.readNInt16 — confirm intent.
        self.writeInt16(round(integer * 32512))

    def writeInt16(self, integer: int) -> None:
        self.writeInteger(integer, 2)

    def writeUInt8(self, integer: int) -> None:
        self.writeUInteger(integer)

    def writeInt8(self, integer: int) -> None:
        self.writeInteger(integer)

    def writeBool(self, boolean: bool) -> None:
        """Append a boolean as a single byte (1 or 0)."""
        self.writeUInt8(1 if boolean else 0)

    # Short aliases kept for interface compatibility.
    writeUInt = writeUInteger
    writeInt = writeInteger
    writeULong = writeUInt64
    writeLong = writeInt64
    writeNUShort = writeNUInt16
    writeNShort = writeNInt16
    writeUShort = writeUInt16
    writeShort = writeInt16
    writeUByte = writeUInt8
    writeByte = writeInt8

    def writeChar(self, string: str) -> None:
        """Append each character UTF-8 encoded, with no length prefix."""
        for char in string:
            self.buffer += char.encode('utf-8')

    def writeString(self, string: str) -> None:
        """Append a length-prefixed (uint16) UTF-8 string; ``None`` is written as empty."""
        if string is None:
            string = ''
        encoded = string.encode('utf-8')
        self.writeUShort(len(encoded))
        self.buffer += encoded
import typing
class Reader:
    """Sequential reader over a bytes buffer with configurable endianness."""

    def __init__(self, buffer: bytes, endian: typing.Literal['big', 'little'] = 'big'):
        self.buffer = buffer
        self.endian = endian
        self.i = 0  # current read offset

    def read(self, length: int = 1) -> bytes:
        """Return the next ``length`` raw bytes and advance the offset."""
        result = self.buffer[self.i:self.i + length]
        self.i += length
        return result

    def readUInteger(self, length: int = 1) -> int:
        """Read an unsigned integer of ``length`` bytes."""
        result = 0
        for x in range(length):
            byte = self.buffer[self.i]
            bit_padding = x * 8
            if self.endian == 'big':
                bit_padding = (8 * (length - 1)) - bit_padding
            result |= byte << bit_padding
            self.i += 1
        return result

    def readInteger(self, length: int = 1) -> int:
        """Read a signed (two's-complement) integer of ``length`` bytes.

        Boundary fix: the old comparison used ``>`` against 2**(bits)/2,
        so the most negative value (e.g. 0x80 for int8) decoded as +128
        instead of -128.
        """
        integer = self.readUInteger(length)
        if integer >= 2 ** (length * 8 - 1):
            integer -= 2 ** (length * 8)
        return integer

    def readUInt64(self) -> int:
        return self.readUInteger(8)

    def readInt64(self) -> int:
        return self.readInteger(8)

    def readFloat(self) -> float:
        """Read an IEEE-754 single-precision float.

        Uses struct.unpack; the previous manual decoder contained dead code
        (``mantissa_base[1] == -1`` compares a str with an int) and decoded
        inf/NaN bit patterns as huge finite numbers.
        """
        import struct
        return struct.unpack('>f' if self.endian == 'big' else '<f', self.read(4))[0]

    def readUInt32(self) -> int:
        return self.readUInteger(4)

    def readInt32(self) -> int:
        return self.readInteger(4)

    def readNUInt16(self) -> float:
        # Unsigned 16-bit integer mapped back to a normalized [0, 1] float.
        return self.readUInt16() / 65535

    def readUInt16(self) -> int:
        return self.readUInteger(2)

    def readNInt16(self) -> float:
        # NOTE(review): scale 32512 (not 32767) mirrors Writer.writeNInt16 — confirm intent.
        return self.readInt16() / 32512

    def readInt16(self) -> int:
        return self.readInteger(2)

    def readUInt8(self) -> int:
        return self.readUInteger()

    def readInt8(self) -> int:
        return self.readInteger()

    def readBool(self) -> bool:
        """Read one byte; any non-zero value is True."""
        return self.readUInt8() >= 1

    # Short aliases kept for interface compatibility.
    readUInt = readUInteger
    readInt = readInteger
    readULong = readUInt64
    readLong = readInt64
    readNUShort = readNUInt16
    readNShort = readNInt16
    readUShort = readUInt16
    readShort = readInt16
    readUByte = readUInt8
    readByte = readInt8

    def readChars(self, length: int = 1) -> str:
        """Read ``length`` bytes and decode them as UTF-8."""
        return self.read(length).decode('utf-8')

    def readString(self) -> str:
        """Read a length-prefixed (uint16) UTF-8 string."""
        length = self.readUShort()
        return self.readChars(length)

    def tell(self) -> int:
        """Current read offset in bytes."""
        return self.i
from .matrix3x3 import Matrix3x3
from . import Matrix
from ..math import Vector3, Quaternion
class Matrix4x4(Matrix):
    """4x4 matrix with helpers for composing translation/rotation/scale transforms."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Component matrices produced by the put_* helpers (None until set).
        self.translation_matrix = None
        self.rotation_matrix = None
        self.scale_matrix = None

    def _minor(self, row: int, column: int) -> float:
        """Determinant of the 3x3 submatrix left after deleting `row` and `column`."""
        submatrix = [
            [self.matrix[r][c] for c in range(4) if c != column]
            for r in range(4) if r != row
        ]
        return Matrix3x3(matrix=submatrix).determinant()

    def determinant(self):
        """Determinant via Laplace expansion along the first row.

        Replaces sixteen hand-unrolled 3x3 minors with a loop over the
        generic `_minor` helper; the arithmetic is unchanged.
        """
        det = 0
        for column in range(4):
            det += (-1) ** column * self.matrix[0][column] * self._minor(0, column)
        return det

    def cofactor(self):
        """Replace `self.matrix` with its cofactor matrix.

        Builds the matrix of 3x3 minors, then applies the (-1)**(row+column)
        checkerboard signs via the shared `find_cofactor` helper — exactly
        what the previous unrolled version did.
        """
        self.matrix = tuple(
            tuple(self._minor(row, column) for column in range(4))
            for row in range(4)
        )
        self.find_cofactor()

    def put_rotation(self, quaterion: Quaternion):
        """Build and cache the rotation matrix for a (unit) quaternion."""
        x, y, z, w = quaterion.x, quaterion.y, quaterion.z, quaterion.w
        rotation_matrix = (
            (1 - 2 * y ** 2 - 2 * z ** 2, 2 * x * y - 2 * z * w, 2 * x * z + 2 * y * w, 0),  # x
            (2 * x * y + 2 * z * w, 1 - 2 * x ** 2 - 2 * z ** 2, 2 * y * z - 2 * x * w, 0),  # y
            (2 * x * z - 2 * y * w, 2 * y * z + 2 * x * w, 1 - 2 * x ** 2 - 2 * y ** 2, 0),  # z
            (0, 0, 0, 1)
        )
        self.rotation_matrix = Matrix4x4(matrix=rotation_matrix)

    def put_position(self, position: Vector3):
        """Build and cache the translation matrix for `position`."""
        translation_matrix = (
            (1, 0, 0, position.x),  # x
            (0, 1, 0, position.y),  # y
            (0, 0, 1, position.z),  # z
            (0, 0, 0, 1)
        )
        self.translation_matrix = Matrix4x4(matrix=translation_matrix)

    def put_scale(self, scale: Vector3):
        """Build and cache the scale matrix for `scale`."""
        scale_matrix = (
            (scale.x, 0, 0, 0),
            (0, scale.y, 0, 0),
            (0, 0, scale.z, 0),
            (0, 0, 0, 1)
        )
        self.scale_matrix = Matrix4x4(matrix=scale_matrix)

    def get_rotation(self) -> Quaternion:
        # Stub: rotation extraction is not implemented; always returns the
        # default (identity) quaternion.
        return Quaternion()

    def get_position(self) -> Vector3:
        """Extract the translation column and cache it via put_position."""
        position = Vector3(self.matrix[0][3], self.matrix[1][3], self.matrix[2][3])
        self.put_position(position)
        return position

    def get_scale(self) -> Vector3:
        # Stub: scale extraction is not implemented; always returns unit scale.
        scale = Vector3(1, 1, 1)
        self.put_scale(scale)
        return scale
from .matrix2x2 import Matrix2x2
from . import Matrix
class Matrix3x3(Matrix):
    """3x3 matrix; determinant and cofactor computed via 2x2 minors."""

    def _minor(self, row: int, column: int) -> float:
        """Determinant of the 2x2 submatrix left after deleting `row` and `column`."""
        submatrix = [
            [self.matrix[r][c] for c in range(3) if c != column]
            for r in range(3) if r != row
        ]
        return Matrix2x2(matrix=submatrix).determinant()

    def determinant(self):
        """Determinant via Laplace expansion along the first row.

        Replaces the hand-unrolled 2x2 minors with a loop over the generic
        `_minor` helper; the arithmetic is unchanged.
        """
        det = 0
        for column in range(3):
            det += (-1) ** column * self.matrix[0][column] * self._minor(0, column)
        return det

    def cofactor(self):
        """Replace `self.matrix` with its cofactor matrix.

        Builds the matrix of 2x2 minors, then applies the (-1)**(row+column)
        signs via the shared `find_cofactor` helper instead of duplicating
        its loop inline (now consistent with Matrix4x4.cofactor).
        """
        self.matrix = tuple(
            tuple(self._minor(row, column) for column in range(3))
            for row in range(3)
        )
        self.find_cofactor()
class Matrix:
    """Minimal dense matrix.

    Construct with either ``matrix=<rows>`` or ``size=(width, height)``
    (which yields an identity matrix). When both are given, ``size`` wins,
    matching the original behaviour.
    """

    def __init__(self, **kwargs):
        matrix = None
        if 'matrix' in kwargs:
            matrix = kwargs['matrix']
        if 'size' in kwargs:
            self.size = kwargs['size']
            matrix = self.get_identity_matrix(
                self.size
            )
        else:
            self.size = (
                len(matrix[0]),  # x (columns)
                len(matrix)  # y (rows)
            )
        self.matrix = matrix

    def __matmul__(self, other):
        """Matrix product.

        Fixed: loop bounds are taken from the operands instead of being
        hard-coded to 4 (which crashed or truncated any non-4x4 product),
        and the shape check is the standard rule columns(A) == rows(B),
        so any compatible sizes now multiply.
        """
        if len(self.matrix[0]) != len(other.matrix):
            raise TypeError('Матрицы не могут быть перемножены')
        multiplied_matrix = []
        for row in self.matrix:
            matrix_row = []
            for column in range(len(other.matrix[0])):
                s = 0
                for i in range(len(other.matrix)):
                    s += row[i] * other.matrix[i][column]
                matrix_row.append(s)
            multiplied_matrix.append(matrix_row)
        return Matrix(matrix=multiplied_matrix)

    def __mul__(self, other: float):
        """Scalar multiplication; returns a new Matrix."""
        multiplied_matrix = []
        for row in range(len(self.matrix)):
            new_row = []
            for column in range(len(self.matrix[0])):
                new_row.append(self.matrix[row][column] * other)
            multiplied_matrix.append(new_row)
        return Matrix(matrix=multiplied_matrix)

    def __repr__(self):
        return f'{self.__class__.__name__} <{self.matrix}>'

    def __str__(self):
        return str(self.matrix)

    @staticmethod
    def get_identity_matrix(size: tuple[int, int]):
        """Identity matrix of the given (width, height)."""
        matrix = []
        for y in range(size[1]):
            row = []
            for x in range(size[0]):
                row.append(1 if x == y else 0)
            matrix.append(row)
        return matrix

    def find_cofactor(self):
        """Apply the (-1)**(row+column) checkerboard signs in place."""
        cofactor_matrix = []
        for row in range(len(self.matrix)):
            new_row = []
            for column in range(len(self.matrix[0])):
                new_row.append(self.matrix[row][column] * (-1) ** (row + column))
            cofactor_matrix.append(new_row)
        self.matrix = cofactor_matrix

    def transpose(self):
        """Transpose in place; the result is stored as a tuple of tuples."""
        height = len(self.matrix)
        width = len(self.matrix[0])
        self.matrix = tuple(
            tuple(self.matrix[row][col] for row in range(height))
            for col in range(width)
        )

    def determinant(self):
        # Base-class stub: sized subclasses implement the real computation.
        det = 0
        return det

    def cofactor(self):
        # Base-class stub; overridden by sized subclasses.
        pass

    def inverse(self):
        """Invert in place via adjugate / determinant; returns self.

        NOTE(review): a singular matrix (det == 0) is silently left
        unchanged rather than raising — callers should check.
        """
        det = self.determinant()
        if det != 0:
            self.transpose()
            self.cofactor()
            self.matrix = self.__mul__(1 / det).matrix
        return self
# Submodules exported by `from ... import *`.
__all__ = [
    'matrix2x2',
    'matrix3x3',
    'matrix4x4'
]
from typing import List
from models_converter.utilities.math import Quaternion
from models_converter.utilities.math import Vector3
class Node:
    """A scene-graph node: a name, an optional parent, instanced resources
    and animation frames."""
    class Instance:
        """A resource (geometry/controller) instanced by a node, together
        with its material binds."""
        class Bind:
            """Maps a material symbol used by the instanced resource to a
            target material."""
            def __init__(self, symbol: str = None, target: str = None):
                self._symbol = symbol
                self._target = target
            def get_symbol(self) -> str or None:
                return self._symbol
            def get_target(self) -> str or None:
                return self._target
        def __init__(self, *, name: str, instance_type: str):
            self._name: str = name
            # Instance kind, e.g. 'CONT' (see the glTF parser) — TODO confirm the full set of values.
            self._type: str = instance_type
            self._target: str or None = None
            self._binds = []
        def __repr__(self) -> str:
            return f'{self._name} - {self._type}'
        def get_name(self) -> str:
            return self._name
        def get_type(self) -> str:
            return self._type
        def get_target(self) -> str:
            return self._target
        def set_target(self, target: str):
            self._target = target
        def get_binds(self) -> list:
            return self._binds
        def add_bind(self, symbol: str, target: str):
            """Append a new Bind mapping `symbol` to `target`."""
            self._binds.append(Node.Instance.Bind(symbol, target))
    class Frame:
        """A single animation keyframe: a frame id plus optional
        position/scale/rotation components."""
        def __init__(self, frame_id: int, position: Vector3 = None, scale: Vector3 = None, rotation: Quaternion = None):
            self._id: int = frame_id
            self._position: Vector3 = position
            self._scale: Vector3 = scale
            self._rotation: Quaternion = rotation
        def get_id(self) -> int:
            return self._id
        def get_rotation(self) -> Quaternion:
            return self._rotation
        def set_rotation(self, rotation: Quaternion):
            self._rotation = rotation
        def get_position(self) -> Vector3:
            return self._position
        def set_position(self, position: Vector3):
            self._position = position
        def get_scale(self) -> Vector3:
            return self._scale
        def set_scale(self, scale: Vector3):
            self._scale = scale
    def __init__(self, *, name: str, parent: str):
        # NOTE(review): purpose unclear from this file — presumably bit flags
        # describing which frame components are animated; confirm against writers.
        self.frames_settings = 0
        self._name: str = name
        self._parent: str or None = parent
        self._instances = []
        self._frames = []
    def __repr__(self) -> str:
        result = self._name
        if self._parent:
            result += " <- " + self._parent
        return f'Node({result})'
    def get_name(self) -> str:
        return self._name
    def get_parent(self) -> str or None:
        return self._parent
    def get_instances(self) -> List[Instance]:
        return self._instances
    def add_instance(self, instance: Instance):
        self._instances.append(instance)
    def get_frames(self) -> List[Frame]:
        return self._frames
    def add_frame(self, frame: Frame):
        self._frames.append(frame)
    def set_frames(self, frames: List[Frame]):
        """Replace all frames; the existing list object is reused in place."""
        self._frames.clear()
        for frame in frames:
            self._frames.append(frame)
from typing import List
class Geometry:
    """A mesh: named vertex streams, per-material triangle lists and
    optional skinning data (bind matrix, joints, weights)."""
    class Vertex:
        """One vertex attribute stream (e.g. positions, normals, UVs)."""
        def __init__(self, *,
                     name: str,
                     vertex_type: str,
                     vertex_index: int,
                     vertex_scale: float,
                     points: List[List[float]]):
            self._name: str = name
            self._type: str = vertex_type
            self._index: int = vertex_index
            self._scale: float = vertex_scale
            self._points: List[List[float]] = points
        def get_name(self) -> str:
            return self._name
        def get_type(self) -> str:
            return self._type
        def get_index(self) -> int:
            return self._index
        def get_point_size(self) -> float:
            # Number of components per point, taken from the first point.
            return len(self._points[0])
        def get_scale(self) -> float:
            return self._scale
        def get_points(self) -> List[List[float]]:
            return self._points
    class Material:
        """Triangles assigned to one material; each triangle is a list of
        per-corner attribute index lists."""
        def __init__(self, name: str, triangles: List[List[List[int]]]):
            self._name: str = name
            self._triangles: List[List[List[int]]] = triangles
        def get_name(self) -> str:
            return self._name
        def get_triangles(self) -> List[List[List[int]]]:
            return self._triangles
    class Joint:
        """A skinning joint: bone name plus its inverse bind matrix."""
        def __init__(self, name: str, matrix: List[float] or None):
            self._name: str = name
            self._matrix: List[float] or None = matrix
        def get_name(self) -> str:
            return self._name
        def get_matrix(self) -> List[float]:
            return self._matrix
        def set_matrix(self, matrix: List[float]):
            self._matrix = matrix
    class Weight:
        """A single vertex weight: joint index and influence strength."""
        def __init__(self, joint_index: int, strength: float):
            self._joint_index: int = joint_index
            self._strength: float = strength
        def get_joint_index(self) -> int:
            return self._joint_index
        def get_strength(self) -> float:
            return self._strength
    def __init__(self, *, name: str, group: str = None):
        self._name: str = name
        self._group: str or None = group
        self._vertices: List[Geometry.Vertex] = []
        self._materials: List[Geometry.Material] = []
        # Skinning data; _bind_matrix stays None for un-skinned meshes.
        self._bind_matrix: List[float] or None = None
        self._joints: List[Geometry.Joint] = []
        self._weights: List[Geometry.Weight] = []
    def get_name(self) -> str:
        return self._name
    def get_group(self) -> str or None:
        return self._group
    def get_vertices(self) -> List[Vertex]:
        return self._vertices
    def add_vertex(self, vertex: Vertex):
        self._vertices.append(vertex)
    def get_materials(self) -> List[Material]:
        return self._materials
    def add_material(self, material: Material):
        self._materials.append(material)
    def has_controller(self) -> bool:
        """True when a skin controller bind matrix has been set."""
        return self._bind_matrix is not None
    def get_bind_matrix(self) -> list:
        return self._bind_matrix
    def set_controller_bind_matrix(self, matrix: List[float]):
        self._bind_matrix = matrix
    def get_joints(self) -> List[Joint]:
        return self._joints
    def add_joint(self, joint: Joint):
        self._joints.append(joint)
    def get_weights(self) -> List[Weight]:
        return self._weights
    def add_weight(self, weight: Weight):
        self._weights.append(weight)
import json
from models_converter.interfaces import WriterInterface
class Writer(WriterInterface):
    """Serializes collected glTF structures into a binary .glb container."""

    MAGIC = b'glTF'

    def __init__(self):
        # Output byte stream; attribute name kept for interface compatibility.
        self.writen = self.MAGIC
        # Concatenated binary payload that becomes the BIN chunk.
        self.data = bytes()
        self.asset = {"version": "2.0"}
        self.scene = 0
        self.scenes = [{
            "nodes": []
        }]
        self.nodes = []
        self.buffers = []
        self.buffer_views = []
        self.accessors = []
        self.meshes = []
        self.materials = []
        self.textures = []
        self.images = []
        self.samplers = []

    def add_root_node_index(self, index):
        """Register a node index as a root of the default scene."""
        self.scenes[0]["nodes"].append(index)

    def add_node(self, node):
        self.nodes.append(node)

    def add_mesh(self, mesh):
        self.meshes.append(mesh)

    def add_material(self, material):
        self.materials.append(material)

    def add_texture(self, texture):
        self.textures.append(texture)

    def add_image(self, image):
        self.images.append(image)

    def add_sampler(self, sampler):
        self.samplers.append(sampler)

    def add_data(self, data):
        """Append raw bytes to the binary payload; returns their byte offset."""
        offset = len(self.data)
        self.data += data
        return offset

    def add_buffer_view(self, buffer_view):
        """Append a bufferView dict; returns its index."""
        index = len(self.buffer_views)
        self.buffer_views.append(buffer_view)
        return index

    def add_accessor(self, accessor):
        """Append an accessor dict; returns its index."""
        index = len(self.accessors)
        self.accessors.append(accessor)
        return index

    def as_dict(self):
        """The top-level glTF JSON document as a plain dict."""
        return {
            "asset": self.asset,
            "scene": self.scene,
            "scenes": self.scenes,
            "nodes": self.nodes,
            "buffers": self.buffers,
            "bufferViews": self.buffer_views,
            "accessors": self.accessors,
            "meshes": self.meshes,
            "materials": self.materials,
            "textures": self.textures,
            "images": self.images,
            "samplers": self.samplers,
        }

    def write(self, data: dict):
        """Assemble the GLB byte stream into ``self.writen``.

        ``data`` (the parsed scene dict) is currently unused; the parameter
        is retained for WriterInterface compatibility.

        Fixes: the buffer descriptor is now appended *before* the JSON is
        serialized (previously json.dumps ran first, so the output declared
        an empty "buffers" array); a leftover debug print was removed; and
        padding is only added when a chunk is not already 4-byte aligned.
        """
        # glTF requires one buffer entry describing the BIN chunk's length.
        self.buffers.append({
            "byteLength": len(self.data)
        })
        json_data = json.dumps(self.as_dict())
        # Chunks must be 4-byte aligned: JSON padded with spaces, BIN with zeros.
        json_data += " " * (-len(json_data) % 4)
        self.data += bytes(-len(self.data) % 4)
        self.writen += (2).to_bytes(4, 'little')  # container format version
        # Total length: 12-byte header + two 8-byte chunk headers + payloads.
        self.writen += (len(json_data) + len(self.data) + 28).to_bytes(4, 'little')
        self.writen += len(json_data).to_bytes(4, 'little') + b'JSON' + json_data.encode()
        self.writen += len(self.data).to_bytes(4, 'little') + b'BIN\x00' + self.data
from models_converter.utilities.math import Vector3, Quaternion
from models_converter.utilities.matrix.matrix4x4 import Matrix4x4
def to_camelcase(property_name: str):
    """Convert a snake_case name to camelCase (the first word is kept as-is)."""
    head, *tail = property_name.split('_')
    return head + ''.join(part.capitalize() for part in tail)
def to_lowercase(property_name: str) -> str:
    """Convert a camelCase name to snake_case (each uppercase letter becomes '_' + lowercase)."""
    return ''.join(
        f'_{char.lower()}' if char.isupper() else char
        for char in property_name
    )
class GlTFProperty:
    """Base class for glTF schema objects.

    Attribute names are snake_case on the Python side and camelCase in the
    JSON document; from_dict/to_dict convert between the two.
    """
    def __init__(self):
        self.extensions = None
        self.extras = None
    def from_dict(self, dictionary: dict):
        """Populate attributes from a parsed JSON dictionary.

        Attributes whose default value is a GlTFProperty *subclass* (the
        class object itself, not an instance) are instantiated and filled
        recursively; lists of dicts become lists of such instances.
        """
        if dictionary:
            for key, value in dictionary.items():
                attribute_name = to_lowercase(key)
                value_type = type(value)
                attribute_value = getattr(self, attribute_name)
                if attribute_value is None or value_type in (int, str, bool):
                    # Plain scalars (and unset attributes) are copied as-is.
                    attribute_value = value
                elif type(attribute_value) in (Vector3, Quaternion, Matrix4x4) and type(value) is list:
                    # Math types are rebuilt from their JSON list representation.
                    attribute_value = type(attribute_value)(*value)
                elif issubclass(attribute_value, GlTFProperty):
                    # NOTE(review): this branch assumes the default is a class
                    # object — a non-class default reaching issubclass would
                    # raise TypeError; confirm all subclasses follow the convention.
                    if value_type is list:
                        value_type = attribute_value
                        values = []
                        for item in value:
                            new_value = value_type()
                            new_value.from_dict(item)
                            values.append(new_value)
                        attribute_value = values
                    else:
                        attribute_value = attribute_value()
                        attribute_value.from_dict(value)
                setattr(self, attribute_name, attribute_value)
    def to_dict(self) -> dict:
        """Serialize back to a JSON-ready dictionary with camelCase keys;
        None-valued attributes are omitted."""
        dictionary = {}
        for key, value in self.__dict__.items():
            if value is not None:
                attribute_name = to_camelcase(key)
                value_type = type(value)
                attribute_value = None
                if value_type is list:
                    # Lists may mix GlTFProperty instances and plain values.
                    attribute_value = []
                    for item in value:
                        item_type = type(item)
                        if issubclass(item_type, GlTFProperty):
                            item = item.to_dict()
                        attribute_value.append(item)
                elif issubclass(value_type, GlTFProperty):
                    attribute_value = value.to_dict()
                elif attribute_value is None:
                    attribute_value = value
                dictionary[attribute_name] = attribute_value
        return dictionary
    def __getitem__(self, item):
        """Dict-style access using either camelCase or snake_case names."""
        item = to_lowercase(item)
        if hasattr(self, item):
            return getattr(self, item)
        else:
            raise IndexError('The object has no attribute named ' + item)
    def __repr__(self) -> str:
        return f'<{self.__class__.__name__} ({self.to_dict()})>'
import json
from models_converter.formats import universal
from models_converter.formats.gltf.chunk import GlTFChunk
from models_converter.formats.gltf.gltf import GlTF
from models_converter.formats.gltf.node import Node
from models_converter.formats.universal import Scene, Geometry
from models_converter.interfaces import ParserInterface
from models_converter.utilities.reader import Reader
class Parser(ParserInterface):
    """Parser for binary glTF (GLB) files.

    Decodes the GLB container (12-byte header, JSON chunk, BIN chunk),
    unpacks every accessor into plain Python lists and converts the glTF
    node tree into the project's universal ``Scene``.
    """

    def __init__(self, data: bytes):
        self.file_data = data
        self.scene = Scene()

        # Filled in by parse().
        self.version = None
        self.length = None

        self.json_chunk = None
        self.bin_chunk = None

        # Decoded binary data, indexed like their glTF counterparts.
        self.buffer_views = []
        self.accessors = []
        self.buffers = []

        self.gltf = GlTF()

    def parse_bin(self):
        """Decode the BIN chunk into ``self.buffers``, ``self.buffer_views`` and ``self.accessors``."""
        reader = Reader(self.bin_chunk.data, 'little')

        for buffer in self.gltf.buffers:
            parsed_buffer = reader.read(buffer.byte_length)
            self.buffers.append(parsed_buffer)

        for buffer_view in self.gltf.buffer_views:
            # Re-seat the reader on the owning buffer and skip to the view's offset.
            reader.__init__(self.buffers[buffer_view.buffer], 'little')
            reader.read(buffer_view.byte_offset)

            length = buffer_view.byte_length
            data = reader.read(length)

            self.buffer_views.append(data)

        for accessor in self.gltf.accessors:
            reader.__init__(self.buffer_views[accessor.buffer_view], 'little')
            reader.read(accessor.byte_offset)

            # Component readers keyed by glTF componentType id -> (reader fn, byte size).
            types = {
                5120: (reader.readByte, 1),
                5121: (reader.readUByte, 1),
                5122: (reader.readShort, 2),
                5123: (reader.readUShort, 2),
                5125: (reader.readUInt32, 4),
                5126: (reader.readFloat, 4)
            }

            if accessor.normalized:
                # Normalized integers map onto [-1, 1] / [0, 1] per the glTF spec.
                types = {
                    5120: (lambda: max(reader.readByte() / 127, -1.0), 1),
                    5121: (lambda: reader.readUByte() / 255, 1),
                    5122: (lambda: max(reader.readShort() / 32767, -1.0), 2),
                    5123: (lambda: reader.readUShort() / 65535, 2),
                    5125: (reader.readUInt32, 4),
                    5126: (reader.readFloat, 4)
                }

            # Number of components per element for every accessor type.
            items_count = {
                'SCALAR': 1,
                'VEC2': 2,
                'VEC3': 3,
                'VEC4': 4,
                'MAT2': 4,
                'MAT3': 9,
                'MAT4': 16
            }

            components_count = items_count[accessor.type]
            read_type, bytes_per_element = types[accessor.component_type]
            default_stride = bytes_per_element * components_count

            stride = self.gltf.buffer_views[accessor.buffer_view].byte_stride or default_stride

            elements_per_stride = stride // bytes_per_element
            elements_count = accessor.count * elements_per_stride

            temp_list = []
            for i in range(elements_count):
                temp_list.append(read_type())

            # Keep only the leading components of every stride-sized group.
            self.accessors.append([
                temp_list[i:i + components_count]
                for i in range(0, elements_count, elements_per_stride)
            ])

    def parse(self):
        """Parse the GLB container and build the universal scene."""
        reader = Reader(self.file_data, 'little')

        magic = reader.read(4)
        if magic != b'glTF':
            raise TypeError('Wrong file magic! "676c5446" expected, but given is ' + magic.hex())

        self.version = reader.readUInt32()
        self.length = reader.readUInt32()

        self.json_chunk = GlTFChunk()
        self.bin_chunk = GlTFChunk()

        self.json_chunk.chunk_length = reader.readUInt32()
        self.json_chunk.chunk_name = reader.read(4)
        self.json_chunk.data = reader.read(self.json_chunk.chunk_length)

        self.bin_chunk.chunk_length = reader.readUInt32()
        self.bin_chunk.chunk_name = reader.read(4)
        self.bin_chunk.data = reader.read(self.bin_chunk.chunk_length)

        self.gltf.from_dict(json.loads(self.json_chunk.data))

        self.parse_bin()

        scene_id = self.gltf.scene
        scene = self.gltf.scenes[scene_id]
        for node_id in scene.nodes:
            node = self.gltf.nodes[node_id]
            self.parse_node(node)

        # TODO: animations
        # for animation in self.gltf.animations:
        #     for channel in animation.channels:
        #         sampler: Animation.AnimationSampler = animation.samplers[channel.sampler]
        #         input_accessor = self.accessors[sampler.input]

    def parse_node(self, gltf_node: Node, parent: str = None):
        """Convert a glTF node (and, recursively, its children) into the scene."""
        node_name = gltf_node.name.split('|')[-1]

        node = universal.Node(
            name=node_name,
            parent=parent
        )

        instance = None
        if gltf_node.mesh is not None and type(self.gltf.meshes) is list:
            mesh = self.gltf.meshes[gltf_node.mesh]
            # Mesh names may be "group|name"; the default group is 'GEO'.
            mesh_name = mesh.name.split('|')

            group = 'GEO'
            name = mesh_name[0]
            if len(mesh_name) > 1:
                group = mesh_name[0]
                name = mesh_name[1]

            geometry = Geometry(name=name, group=group)

            if gltf_node.skin is not None:
                instance = universal.Node.Instance(name=geometry.get_name(), instance_type='CONT')
                geometry.set_controller_bind_matrix([1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1])

                skin_id = gltf_node.skin
                skin = self.gltf.skins[skin_id]
                bind_matrices = self.accessors[skin.inverse_bind_matrices]
                # Regroup each flat 16-float matrix by column, then flatten again
                # (transposes column-major storage into row-major order).
                bind_matrices = [[m[0::4], m[1::4], m[2::4], m[3::4]] for m in bind_matrices]
                for matrix_index in range(len(bind_matrices)):
                    m = bind_matrices[matrix_index]

                    matrix = m[0]
                    matrix.extend(m[1])
                    matrix.extend(m[2])
                    matrix.extend(m[3])

                    bind_matrices[matrix_index] = matrix

                # Fixed: skin / joint nodes are objects, not dicts
                # (previously skin['joints'] and joint_node['name'] raised at runtime).
                for joint_index, joint in enumerate(skin.joints):
                    joint_node = self.gltf.nodes[joint]
                    joint_name = joint_node.name
                    matrix = bind_matrices[joint_index]

                    geometry.add_joint(Geometry.Joint(joint_name, matrix))
            else:
                instance = universal.Node.Instance(name=geometry.get_name(), instance_type='GEOM')

            position_offset = 0
            normal_offset = 0
            texcoord_offset = 0

            for primitive in mesh.primitives:
                if primitive.to_dict() != {}:
                    primitive_index = mesh.primitives.index(primitive)
                    attributes = primitive.attributes
                    material_id = primitive.material
                    polygons_id = primitive.indices

                    triangles = self.accessors[polygons_id]
                    material = self.gltf.materials[material_id]

                    material_name = material.extensions['SC_shader']['name']
                    instance.add_bind(material_name, material_name)

                    position = []
                    normal = []
                    texcoord = []

                    joint_ids = 0

                    for attribute_id in attributes:
                        attribute = attributes[attribute_id]
                        points = None

                        if attribute_id == 'POSITION':
                            # Bake the node's scale/translation into the vertex positions.
                            position = self.accessors[attribute]
                            points = list(map(
                                lambda point: (
                                    point[0] * gltf_node.scale.x + gltf_node.translation.x,
                                    point[1] * gltf_node.scale.y + gltf_node.translation.y,
                                    point[2] * gltf_node.scale.z + gltf_node.translation.z
                                ),
                                position
                            ))
                        elif attribute_id == 'NORMAL':
                            normal = self.accessors[attribute]
                            points = list(map(
                                lambda point: (
                                    point[0] * gltf_node.scale.x,
                                    point[1] * gltf_node.scale.y,
                                    point[2] * gltf_node.scale.z
                                ),
                                normal
                            ))
                        elif attribute_id.startswith('TEXCOORD'):
                            # Flip V: glTF uses a top-left UV origin.
                            texcoord = self.accessors[attribute]

                            texcoord = [[item[0], 1 - item[1]] for item in texcoord]
                            attribute_id = 'TEXCOORD'
                            points = texcoord
                        elif attribute_id.startswith('JOINTS'):
                            joint_ids = self.accessors[attribute]
                        elif attribute_id.startswith('WEIGHTS'):
                            weights = self.accessors[attribute]

                            # NOTE(review): weights are assumed to be stored as unsigned
                            # bytes (0..255) -- confirm for accessors of other component types.
                            for x in range(len(joint_ids)):
                                geometry.add_weight(Geometry.Weight(joint_ids[x][0], weights[x][0] / 255))
                                geometry.add_weight(Geometry.Weight(joint_ids[x][1], weights[x][1] / 255))
                                geometry.add_weight(Geometry.Weight(joint_ids[x][2], weights[x][2] / 255))
                                geometry.add_weight(Geometry.Weight(joint_ids[x][3], weights[x][3] / 255))

                        if points:
                            geometry.add_vertex(Geometry.Vertex(
                                name=f'{attribute_id.lower()}_{primitive_index}',
                                vertex_type=attribute_id,
                                vertex_index=len(geometry.get_vertices()),
                                vertex_scale=1,
                                points=points
                            ))

                    # Rebase the flat index list against the per-attribute offsets
                    # accumulated over earlier primitives.
                    triangles = [
                        [
                            [
                                point[0] + normal_offset,
                                point[0] + position_offset,
                                point[0] + texcoord_offset
                            ] for point in triangles[x:x + 3]
                        ] for x in range(0, len(triangles), 3)
                    ]

                    geometry.add_material(Geometry.Material(material_name, triangles))

                    for attribute_id in attributes:
                        if attribute_id == 'POSITION':
                            position_offset += len(position)
                        elif attribute_id == 'NORMAL':
                            normal_offset += len(normal)
                        elif attribute_id.startswith('TEXCOORD'):
                            texcoord_offset += len(texcoord)

            self.scene.add_geometry(geometry)

        if instance is not None:
            node.add_instance(instance)

        self.scene.add_node(node)

        # Frame 0 carries the node's static transform.
        node.add_frame(universal.Node.Frame(
            0,
            gltf_node.translation,
            gltf_node.scale,
            gltf_node.rotation
        ))

        if gltf_node.children:
            for child_id in gltf_node.children:
                child = self.gltf.nodes[child_id]
                self.parse_node(child, node_name)
from ..universal import Scene
from ...interfaces import ParserInterface
from ...utilities.reader import Reader
from .chunks import *
class Parser(ParserInterface):
    """Parser for SC3D (.scw) binary scene files."""

    def __init__(self, file_data: bytes):
        self.file_data = file_data
        self.scene = Scene()
        self.chunks = []
        self.header = None

    def parse(self):
        """Validate the file magic, split the payload into chunks and process each one."""
        reader = Reader(self.file_data)

        if reader.read(4) != b'SC3D':
            raise TypeError('File Magic isn\'t "SC3D"')

        self._split_chunks(reader)

        for entry in self.chunks:
            self._process_chunk(entry['chunk_name'], entry['data'])

    def _process_chunk(self, chunk_name, chunk_data):
        """Parse one raw chunk payload and merge its contents into the scene."""
        if chunk_name == 'HEAD':
            head = HEAD()
            head.parse(chunk_data)
            self.header = head
        elif chunk_name == 'MATE':
            material = MATE(self.header)
            material.parse(chunk_data)
            self.scene.add_material(material)
        elif chunk_name == 'GEOM':
            geometry_chunk = GEOM(self.header)
            geometry_chunk.parse(chunk_data)
            self.scene.add_geometry(geometry_chunk.geometry)
        elif chunk_name == 'CAME':
            camera_chunk = CAME(self.header)
            camera_chunk.parse(chunk_data)
            self.scene.add_camera(camera_chunk.camera)
        elif chunk_name == 'NODE':
            node_chunk = NODE(self.header)
            node_chunk.parse(chunk_data)
            self.scene.get_nodes().extend(node_chunk.nodes)
        elif chunk_name == 'WEND':
            # End-of-file marker; parsed for completeness, nothing is kept.
            wend = WEND()
            wend.parse(chunk_data)
        else:
            raise TypeError(f'Unknown chunk: {chunk_name}')

    def _split_chunks(self, reader: Reader):
        """Cut the stream into raw chunk records: length, 4-char name, payload, CRC."""
        # 12 = len(Chunk Length) + len(Chunk Name) + len(Chunk CRC)
        while reader.tell() <= len(self.file_data) - 12:
            chunk_length = reader.readUInt32()
            # Dict values are evaluated in order, preserving the read sequence.
            self.chunks.append({
                'chunk_name': reader.readChars(4),
                'data': reader.read(chunk_length),
                'crc': reader.readUInt32()
            })
from models_converter.utilities.math import Vector3
from models_converter.utilities.math import Quaternion
from . import Chunk
from ...universal.node import Node
class NODE(Chunk):
    """'NODE' chunk: scene-graph nodes with their instances and keyframes."""

    def __init__(self, header):
        super().__init__(header)

        self.chunk_name = 'NODE'
        self.nodes = []

    def parse(self, buffer: bytes):
        """Read every node: name, parent, instance bindings and keyframe track."""
        super().parse(buffer)

        nodes_count = self.readUShort()
        for node_index in range(nodes_count):
            node = Node(
                name=self.readString(),
                parent=self.readString()
            )

            instances_count = self.readUShort()
            for x in range(instances_count):
                instance = Node.Instance(
                    instance_type=self.readChars(4),
                    name=self.readString()
                )

                if instance.get_type() in ('GEOM', 'CONT'):
                    materials_count = self.readUShort()
                    for bind in range(materials_count):
                        symbol = self.readString()
                        target = self.readString()
                        instance.add_bind(symbol, target)
                elif instance.get_type() == 'CAME':
                    instance.set_target(self.readString())
                node.add_instance(instance)

            frames_count = self.readUShort()
            if frames_count > 0:
                # Components not present in a frame keep their previous value.
                rotation = Quaternion()
                position = Vector3()
                scale = Vector3()

                node.frames_settings = self.readUByte()
                for frame_index in range(frames_count):
                    frame = Node.Frame(self.readUShort())

                    if node.frames_settings & 1 or frame_index == 0:  # Rotation
                        rotation.x = self.readNShort()
                        rotation.y = self.readNShort()
                        rotation.z = self.readNShort()
                        rotation.w = self.readNShort()

                    if node.frames_settings & 2 or frame_index == 0:  # Position X
                        position.x = self.readFloat()
                    if node.frames_settings & 4 or frame_index == 0:  # Position Y
                        position.y = self.readFloat()
                    if node.frames_settings & 8 or frame_index == 0:  # Position Z
                        position.z = self.readFloat()

                    if node.frames_settings & 16 or frame_index == 0:  # Scale X
                        scale.x = self.readFloat()
                    if node.frames_settings & 32 or frame_index == 0:  # Scale Y
                        scale.y = self.readFloat()
                    if node.frames_settings & 64 or frame_index == 0:  # Scale Z
                        scale.z = self.readFloat()

                    frame.set_rotation(rotation.clone())
                    frame.set_position(position.clone())
                    frame.set_scale(scale.clone())

                    node.add_frame(frame)

            self.nodes.append(node)

    def encode(self):
        """Serialize every node back into the chunk buffer."""
        super().encode()

        self.writeUShort(len(self.nodes))
        for node in self.nodes:
            self.writeString(node.get_name())
            self.writeString(node.get_parent())

            self.writeUShort(len(node.get_instances()))
            for instance in node.get_instances():
                self.writeChar(instance.get_type())
                self.writeString(instance.get_name())

                # NOTE(review): 'CAME' instance targets read by parse() are not
                # written back here -- confirm against the format.
                self.writeUShort(len(instance.get_binds()))
                for bind in instance.get_binds():
                    self.writeString(bind.get_symbol())
                    self.writeString(bind.get_target())

            self._encode_frames(node.get_frames(), node.frames_settings)

        self.length = len(self.buffer)

    def _encode_frames(self, frames, frames_settings):
        """Write the keyframe track.

        The ``frames_settings`` bit layout mirrors ``parse``:
        1 = rotation, 2/4/8 = position x/y/z, 16/32/64 = scale x/y/z.
        Frame 0 always carries every component.
        (Fixed: the masks previously disagreed with parse(), so encoded
        files could not be read back by this class.)
        """
        self.writeUShort(len(frames))

        if len(frames) > 0:
            self.writeUByte(frames_settings)

        for frame_index, frame in enumerate(frames):
            self.writeUShort(frame.get_id())
            if frames_settings & 1 or frame_index == 0:  # Rotation
                rotation = frame.get_rotation()
                self.writeNShort(rotation.x)
                self.writeNShort(rotation.y)
                self.writeNShort(rotation.z)
                self.writeNShort(rotation.w)

            if frames_settings & 2 or frame_index == 0:  # Position X
                self.writeFloat(frame.get_position().x)
            if frames_settings & 4 or frame_index == 0:  # Position Y
                self.writeFloat(frame.get_position().y)
            if frames_settings & 8 or frame_index == 0:  # Position Z
                self.writeFloat(frame.get_position().z)

            if frames_settings & 16 or frame_index == 0:  # Scale X
                self.writeFloat(frame.get_scale().x)
            if frames_settings & 32 or frame_index == 0:  # Scale Y
                self.writeFloat(frame.get_scale().y)
            if frames_settings & 64 or frame_index == 0:  # Scale Z
                self.writeFloat(frame.get_scale().z)
from . import Chunk
from ...universal import Geometry
class GEOM(Chunk):
    """'GEOM' chunk: a single geometry with vertices, optional skin and materials."""

    def __init__(self, header):
        super().__init__(header)

        self.chunk_name = 'GEOM'
        # Populated by parse(); consumed by encode().
        self.geometry: Geometry or None = None

    def parse(self, buffer: bytes):
        """Read the geometry: name/group, vertices, skin and material index lists."""
        super().parse(buffer)

        self.geometry = Geometry(
            name=self.readString(),
            group=self.readString()
        )

        if self.header.version < 2:
            # Old versions store a 4x4 transform matrix here; it is read and discarded.
            matrix = []
            for x in range(4):
                temp_list = []
                for x1 in range(4):
                    temp_list.append(self.readFloat())
                matrix.append(temp_list)

        self._parse_vertices()
        self._parse_skin()
        self._parse_materials()

    def _parse_vertices(self):
        vertex_count = self.readUByte()
        for x in range(vertex_count):
            vertex_type = self.readString()
            vertex_index = self.readUByte()
            self.readUByte()  # sub_index
            vertex_stride = self.readUByte()
            vertex_scale = self.readFloat()
            # Renamed from `vertex_count`, which shadowed the outer loop bound.
            points_count = self.readUInt32()

            if vertex_type == 'VERTEX':
                vertex_type = 'POSITION'

            coordinates = []
            for x1 in range(points_count):
                coordinates_massive = [self.readNShort() for _ in range(vertex_stride)]

                if vertex_type == 'TEXCOORD':
                    # The V axis is stored flipped on disk.
                    coordinates_massive[1::2] = [1 - value for value in coordinates_massive[1::2]]

                coordinates.append(coordinates_massive)

            self.geometry.add_vertex(Geometry.Vertex(
                name=f'{vertex_type.lower()}_0',
                vertex_type=vertex_type,
                vertex_index=vertex_index,
                vertex_scale=vertex_scale,
                points=coordinates)
            )

    def _parse_skin(self):
        has_controller = self.readBool()
        if has_controller:
            self.geometry.set_controller_bind_matrix([self.readFloat() for _ in range(16)])

        self._parse_joints()
        self._parse_weights()

    def _parse_joints(self):
        joint_counts = self.readUByte()
        for x in range(joint_counts):
            joint_name = self.readString()
            joint_matrix = [self.readFloat() for _ in range(16)]
            self.geometry.add_joint(Geometry.Joint(joint_name, joint_matrix))

    def _parse_weights(self):
        # Weights are stored as groups of four (joint index, strength) pairs per vertex.
        vertex_weights_count = self.readUInt32()
        for x in range(vertex_weights_count):
            joint_a = self.readUByte()
            joint_b = self.readUByte()
            joint_c = self.readUByte()
            joint_d = self.readUByte()
            weight_a = self.readNUShort()
            weight_b = self.readNUShort()
            weight_c = self.readNUShort()
            weight_d = self.readNUShort()

            self.geometry.add_weight(Geometry.Weight(joint_a, weight_a))
            self.geometry.add_weight(Geometry.Weight(joint_b, weight_b))
            self.geometry.add_weight(Geometry.Weight(joint_c, weight_c))
            self.geometry.add_weight(Geometry.Weight(joint_d, weight_d))

    def _parse_materials(self):
        materials_count = self.readUByte()
        for x in range(materials_count):
            material_name = self.readString()
            self.readString()

            triangles_count = self.readUShort()
            inputs_count = self.readUByte()
            vertex_index_length = self.readUByte()

            triangles = []
            for x1 in range(triangles_count):
                triangles.append([
                    [
                        self.readUInteger(vertex_index_length)  # Vertex
                        for _ in range(inputs_count)
                    ] for _ in range(3)  # 3 points
                ])
            self.geometry.add_material(Geometry.Material(material_name, triangles))

    def encode(self):
        """Serialize the geometry back into the chunk buffer."""
        super().encode()

        self.writeString(self.geometry.get_name())
        self.writeString(self.geometry.get_group())

        self._encode_vertices(self.geometry.get_vertices())
        self._encode_skin()
        self._encode_materials()

        self.length = len(self.buffer)

    def _encode_vertices(self, vertices):
        self.writeUByte(len(vertices))
        for vertex in vertices:
            self.writeString(vertex.get_type())
            self.writeUByte(vertex.get_index())
            self.writeUByte(0)  # sub_index
            self.writeUByte(vertex.get_point_size())
            self.writeFloat(vertex.get_scale())
            self.writeUInt32(len(vertex.get_points()))

            for point in vertex.get_points():
                # Work on a copy: encoding must not mutate the geometry's data
                # (previously the V-flip was applied in place on every encode).
                values = list(point)
                if vertex.get_type() == 'TEXCOORD':
                    values[1::2] = [1 - value for value in values[1::2]]

                for coordinate in values:
                    # NOTE(review): the value is quantized without dividing by the
                    # vertex scale first -- confirm this matches the writer's callers.
                    self.writeShort(round(coordinate * 32512))

    def _encode_skin(self):
        self.writeBool(self.geometry.has_controller())
        if self.geometry.has_controller():
            for x in self.geometry.get_bind_matrix():
                self.writeFloat(x)

        self._encode_joints()
        self._encode_weight()

    def _encode_joints(self):
        if not self.geometry.has_controller():
            self.writeUByte(0)
            return

        self.writeUByte(len(self.geometry.get_joints()))
        for joint in self.geometry.get_joints():
            self.writeString(joint.get_name())

            for x in joint.get_matrix():
                self.writeFloat(x)

    def _encode_weight(self):
        if not self.geometry.has_controller():
            self.writeUInt32(0)
            return

        # Weights travel in quads: four joint indices then four strengths.
        weights_quads = len(self.geometry.get_weights()) // 4
        self.writeUInt32(weights_quads)
        for quad_index in range(weights_quads):
            quad = self.geometry.get_weights()[quad_index * 4:(quad_index + 1) * 4]
            for weight in quad:
                self.writeUByte(weight.get_joint_index())
            for weight in quad:
                self.writeNUShort(weight.get_strength())

    def _encode_materials(self):
        self.writeUByte(len(self.geometry.get_materials()))
        for material in self.geometry.get_materials():
            self.writeString(material.get_name())
            self.writeString('')
            self.writeUShort(len(material.get_triangles()))

            # Calculate settings: narrowest integer width that fits every index.
            inputs_count = len(material.get_triangles()[0][0])

            maximal_value = 0
            for triangle in material.get_triangles():
                for point in triangle:
                    for vertex in point:
                        if vertex > maximal_value:
                            maximal_value = vertex

            # Fixed: indices above 65535 were silently truncated before
            # (the width was capped at 2 bytes).
            if maximal_value <= 255:
                item_length = 1
            elif maximal_value <= 65535:
                item_length = 2
            else:
                item_length = 4

            # Write Settings
            self.writeUByte(inputs_count)
            self.writeUByte(item_length)

            # Write Polygons
            for triangle in material.get_triangles():
                for point in triangle:
                    for vertex in point:
                        self.writeUInteger(vertex, item_length)
from . import Chunk
class MATE(Chunk):
    """'MATE' chunk: a material with its shader effect parameters.

    Color slots are stored on disk as ARGB byte quadruplets; the diffuse,
    specular, colorize and emission slots may instead hold a texture path,
    selected by a leading bool. Unidentified fields keep their original
    ``v1``..``v7`` names.
    """

    def __init__(self, header):
        super().__init__(header)

        self.chunk_name = 'MATE'

    def _read_color(self):
        """Read an ARGB byte quadruplet, returning it as an (r, g, b, a) tuple."""
        a = self.readUByte()
        r = self.readUByte()
        g = self.readUByte()
        b = self.readUByte()
        return r, g, b, a

    def _read_texture_or_color(self):
        """Read a texture path (when the leading bool is set) or a plain color."""
        if self.readBool():
            return self.readString()
        return self._read_color()

    def parse(self, buffer: bytes):
        super().parse(buffer)

        self.name = self.readString()
        self.shader = self.readString()
        self.v1 = self.readUByte()
        self.v2 = self.readUByte()

        effect = {}

        effect['ambient'] = self._read_color()
        effect['diffuse'] = self._read_texture_or_color()
        effect['specular'] = self._read_texture_or_color()

        self.v3 = self.readString()
        self.v4 = self.readString()

        effect['colorize'] = self._read_texture_or_color()
        effect['emission'] = self._read_texture_or_color()

        self.opacity_texture = self.readString()
        self.v5 = self.readFloat()
        self.v6 = self.readFloat()

        effect['lightmaps'] = {
            'diffuse': self.readString(),
            'specular': self.readString()
        }

        if self.header.version == 2:
            self.v7 = self.readString()

            shader_define_flags = self.readUInt32()
            effect['shader_define_flags'] = shader_define_flags

            if shader_define_flags & 32768:
                # Four extra (currently unused) floats follow when bit 15 is set.
                self.readFloat()
                self.readFloat()
                self.readFloat()
                self.readFloat()

        self.effect = effect

    def _write_color(self, color):
        """Write an (r, g, b, a) tuple in the on-disk ARGB byte order."""
        r, g, b, a = color
        self.writeUByte(a)
        self.writeUByte(r)
        self.writeUByte(g)
        self.writeUByte(b)

    def _write_texture_or_color(self, value):
        """Write a texture path or a color, preceded by the selector bool."""
        is_texture = type(value) is str
        self.writeBool(is_texture)
        if is_texture:
            self.writeString(value)
        else:
            self._write_color(value)

    def encode(self):
        super().encode()

        self.writeString(getattr(self, 'name'))
        self.writeString(getattr(self, 'shader'))
        self.writeUByte(4)  # v1
        self.writeUByte(0)  # v2

        effect = getattr(self, 'effect')

        self._write_color(effect['ambient'])
        self._write_texture_or_color(effect['diffuse'])
        self._write_texture_or_color(effect['specular'])

        self.writeString('.')  # v3
        self.writeString('')  # v4

        self._write_texture_or_color(effect['colorize'])
        self._write_texture_or_color(effect['emission'])

        self.writeString('')  # opacity_texture
        self.writeFloat(1)  # v5
        self.writeFloat(0)  # v6

        self.writeString(effect['lightmaps']['diffuse'])
        self.writeString(effect['lightmaps']['specular'])

        # Fixed: the header is an object (attribute access), not a dict.
        if self.header.version == 2:
            self.writeString('')  # v7
            self.writeUInt32(effect['shader_define_flags'])

            if effect['shader_define_flags'] & 32768:
                # Keep the stream round-trippable: parse() consumes four floats
                # when bit 15 is set (they were previously never written).
                for _ in range(4):
                    self.writeFloat(0)

        self.length = len(self.buffer)
from xml.etree.ElementTree import *
from .collada import Collada
from ..universal import Scene, Node, Geometry
from ...interfaces import WriterInterface
from ...utilities.matrix.matrix4x4 import Matrix4x4
class Writer(WriterInterface):
    """COLLADA (.dae) writer: serializes a universal ``Scene`` into XML.

    The result ends up in ``self.writen`` (sic -- public attribute name kept
    for backward compatibility) after ``write()``.
    """

    def __init__(self):
        self.writen = None

        self.dae = Collada()

        self.library_materials = None
        self.library_effects = None
        self.library_images = None
        self.library_geometries = None
        self.library_controllers = None
        self.library_animations = None
        self.library_cameras = None
        self.library_visual_scenes = None

    def create_libraries(self):
        """Create the top-level <library_*> containers under the COLLADA root."""
        self.library_materials = SubElement(self.dae.root, 'library_materials')
        self.library_effects = SubElement(self.dae.root, 'library_effects')
        # self.library_images = SubElement(dae.collada, 'library_images')
        self.library_geometries = SubElement(self.dae.root, 'library_geometries')
        self.library_controllers = SubElement(self.dae.root, 'library_controllers')
        self.library_animations = SubElement(self.dae.root, 'library_animations')
        # self.library_cameras = SubElement(self.dae.collada, 'library_cameras')
        self.library_visual_scenes = SubElement(self.dae.root, 'library_visual_scenes')

    def sign(self):
        """Write the <asset>/<contributor> block and return the contributor element."""
        asset = SubElement(self.dae.root, 'asset')

        contributor = SubElement(asset, 'contributor')
        SubElement(contributor, 'author').text = 'Vorono4ka'
        SubElement(contributor, 'authoring_tool').text = 'models_converter (https://github.com/vorono4ka/3d-converter)'

        return contributor

    def write(self, scene: Scene):
        """Serialize the scene; the XML string is stored in ``self.writen``."""
        contributor = self.sign()
        # if 'version' in data['header']:
        #     SubElement(contributor, 'comments').text = 'Version: ' + str(data['header']['version'])

        self.create_libraries()

        # for material in scene.get_materials():
        #     self.create_material(material)

        for geometry in scene.get_geometries():
            geometry_name = self.create_geometry(geometry)

            if geometry.has_controller():
                self.create_controller(geometry_name, geometry)

        self.create_scene(scene)

        self.writen = tostring(self.dae.root, xml_declaration=True).decode()

    def create_material(self, material_data):
        """Write a <material> plus its <effect> (phong) definition."""
        material_name = material_data['name']

        effect_name = f'{material_name}-effect'

        # Fixed: the <material> element was previously created twice,
        # producing duplicate ids in library_materials.
        material = SubElement(self.library_materials, 'material', id=material_name)
        SubElement(material, 'instance_effect', url=f'#{effect_name}')

        effect = SubElement(self.library_effects, 'effect', id=effect_name)
        profile = SubElement(effect, 'profile_COMMON')
        technique = SubElement(profile, 'technique', sid='common')

        ambient_data = material_data['effect']['ambient']
        diffuse_data = material_data['effect']['diffuse']
        emission_data = material_data['effect']['emission']
        specular_data = material_data['effect']['specular']

        phong = SubElement(technique, 'phong')

        if type(ambient_data) is list:
            ambient = SubElement(phong, 'ambient')
            ambient_data[3] /= 255  # alpha is stored 0..255; COLLADA expects 0..1
            ambient_data = [str(item) for item in ambient_data]
            SubElement(ambient, 'color').text = ' '.join(ambient_data)
        # else:
        #     SubElement(ambient, 'texture', texture=ambient_data, texcoord='CHANNEL0')

        if type(diffuse_data) is list:
            diffuse = SubElement(phong, 'diffuse')
            diffuse_data[3] /= 255
            diffuse_data = [str(item) for item in diffuse_data]
            SubElement(diffuse, 'color').text = ' '.join(diffuse_data)
        # else:
        #     SubElement(diffuse, 'texture', texture=diffuse_data, texcoord='CHANNEL0')

        if type(emission_data) is list:
            emission = SubElement(phong, 'emission')
            emission_data[3] /= 255
            emission_data = [str(item) for item in emission_data]
            SubElement(emission, 'color').text = ' '.join(emission_data)
        # else:
        #     SubElement(emission, 'texture', texture=emission_data, texcoord='CHANNEL0')

        if type(specular_data) is list:
            specular = SubElement(phong, 'specular')
            specular_data[3] /= 255
            specular_data = [str(item) for item in specular_data]
            SubElement(specular, 'color').text = ' '.join(specular_data)
        # else:
        #     SubElement(specular, 'texture', texture=specular_data, texcoord='CHANNEL0')

    def create_geometry(self, geometry: Geometry):
        """Write a <geometry>/<mesh> with its sources and triangle lists; return the name."""
        geometry_name = geometry.get_name()

        collada_geometry = SubElement(self.library_geometries, 'geometry', id=f'{geometry_name}-geom')
        mesh = SubElement(collada_geometry, 'mesh')

        for vertex in geometry.get_vertices():
            params = []
            vertex_type = vertex.get_type()
            vertex_name = vertex.get_name()
            coordinate = vertex.get_points()
            stride = len(coordinate[0])

            if vertex_type == 'VERTEX':
                vertex_type = 'POSITION'

            source_name = f'{geometry_name}-{vertex_name}'

            if vertex_type in ['POSITION', 'NORMAL']:
                params.append({'name': 'X', 'type': 'float'})
                params.append({'name': 'Y', 'type': 'float'})
                params.append({'name': 'Z', 'type': 'float'})
            elif vertex_type in ['TEXCOORD']:
                params.append({'name': 'S', 'type': 'float'})
                params.append({'name': 'T', 'type': 'float'})

            # De-quantize by applying the vertex scale while formatting.
            self.dae.write_source(
                mesh,
                source_name,
                'float_array',
                tuple(' '.join(str(sub_item * vertex.get_scale()) for sub_item in item) for item in coordinate),
                stride,
                params
            )

            if vertex_type == 'POSITION':
                vertices = SubElement(mesh, 'vertices', id=f'{source_name}-vertices')
                self.dae.write_input(vertices, 'POSITION', source_name)

        for material in geometry.get_materials():
            collada_triangles = SubElement(mesh, 'triangles',
                                           count=f'{len(material.get_triangles())}',
                                           material=material.get_name())

            # Each triangle point carries one index per input source.
            inputs_count = len(material.get_triangles()[0][0])

            for vertex_index in range(len(geometry.get_vertices())):
                if vertex_index == inputs_count:
                    break
                vertex = geometry.get_vertices()[vertex_index]
                input_type = vertex.get_type()

                if input_type == 'POSITION':
                    input_type = 'VERTEX'

                source_id = f'{geometry_name}-{vertex.get_name()}'
                if input_type == 'VERTEX':
                    source_id = f'{source_id}-vertices'

                self.dae.write_input(collada_triangles, input_type, source_id, vertex_index)

            polygons = SubElement(collada_triangles, 'p')

            formatted_polygons_data = []
            for triangle in material.get_triangles():
                for point in triangle:
                    for coordinate in point:
                        formatted_polygons_data.append(str(coordinate))

            polygons.text = ' '.join(formatted_polygons_data)

        return geometry_name

    def create_controller(self, geometry_name: str, geometry: Geometry):
        """Write the <controller>/<skin> (joints, bind matrices and vertex weights)."""
        controller = SubElement(self.library_controllers, 'controller', id=f'{geometry_name}-cont')
        skin = SubElement(controller, 'skin', source=f'#{geometry_name}-geom')

        SubElement(skin, 'bind_shape_matrix').text = ' '.join(map(str, geometry.get_bind_matrix()))

        joints_names_source_id = f'{geometry_name}-joints'
        joints_matrices_source_id = f'{geometry_name}-joints-bind-matrices'
        weights_source_id = f'{geometry_name}-weights'

        self.dae.write_source(
            skin,
            joints_names_source_id,
            'Name_array',
            tuple(joint.get_name() for joint in geometry.get_joints()),
            1,
            [{'name': 'JOINT', 'type': 'name'}]
        )

        self.dae.write_source(
            skin,
            joints_matrices_source_id,
            'float_array',
            tuple(' '.join(map(str, joint.get_matrix())) for joint in geometry.get_joints()),
            16,
            [{'name': 'TRANSFORM', 'type': 'float4x4'}]
        )

        # Weights come in groups of four per vertex; zero-strength entries are
        # dropped, and vcount records how many remain for each vertex.
        vertex_weights = []
        unique_weights = []
        vcount = [0] * (len(geometry.get_weights()) // 4)
        for weight_index in range(len(geometry.get_weights())):
            weight = geometry.get_weights()[weight_index]
            if weight.get_strength() == 0:
                continue
            vcount[weight_index // 4] += 1
            vertex_weights.append(weight.get_joint_index())
            if weight.get_strength() in unique_weights:
                vertex_weights.append(unique_weights.index(weight.get_strength()))
            else:
                unique_weights.append(weight.get_strength())
                vertex_weights.append(len(unique_weights) - 1)

        self.dae.write_source(
            skin,
            weights_source_id,
            'float_array',
            tuple(map(str, unique_weights)),
            1,
            [{'name': 'WEIGHT', 'type': 'float'}]
        )

        joints = SubElement(skin, 'joints')
        self.dae.write_input(joints, 'JOINT', joints_names_source_id)
        self.dae.write_input(joints, 'INV_BIND_MATRIX', joints_matrices_source_id)

        collada_vertex_weights = SubElement(skin, 'vertex_weights', count=f'{len(vcount)}')
        self.dae.write_input(collada_vertex_weights, 'JOINT', joints_names_source_id, 0)
        self.dae.write_input(collada_vertex_weights, 'WEIGHT', weights_source_id, 1)

        SubElement(collada_vertex_weights, 'vcount').text = ' '.join(map(str, vcount))
        SubElement(collada_vertex_weights, 'v').text = ' '.join(map(str, vertex_weights))

    def create_animation(self, node_name, frames, matrix_output, time_input):
        """Write a per-node <animation> sampling a matrix transform over time."""
        animation = SubElement(self.library_animations, 'animation', id=node_name)

        self.dae.write_source(
            animation,
            f'{node_name}-time-input',
            'float_array',
            time_input,
            1,
            [{'name': 'TIME', 'type': 'float'}]
        )

        self.dae.write_source(
            animation,
            f'{node_name}-matrix-output',
            'float_array',
            matrix_output,
            16,
            [{'name': 'TRANSFORM', 'type': 'float4x4'}]
        )

        self.dae.write_source(
            animation,
            f'{node_name}-interpolation',
            'Name_array',
            tuple('LINEAR' for _ in range(len(frames))),
            1,
            [{'name': 'INTERPOLATION', 'type': 'name'}]
        )

        sampler = SubElement(animation, 'sampler', id=f'{node_name}-sampler')
        self.dae.write_input(
            sampler,
            'INPUT',
            f'{node_name}-time-input'
        )
        self.dae.write_input(
            sampler,
            'OUTPUT',
            f'{node_name}-matrix-output'
        )
        self.dae.write_input(
            sampler,
            'INTERPOLATION',
            f'{node_name}-interpolation'
        )

        SubElement(animation, 'channel',
                   source=f'#{node_name}-sampler',
                   target=f'{node_name}/transform')

    def create_scene(self, scene: Scene):
        """Write the <visual_scene> with every node and reference it from <scene>."""
        visual_scene = SubElement(self.library_visual_scenes, 'visual_scene',
                                  id='3dConverterScene',
                                  name='3d-Converter Scene')

        # Worklist walk: the first pass (node_index == 0) seeds the list with
        # every node that carries instances; later passes pull in the parents
        # of already-collected names. Anything NOT collected here is a joint.
        # (Fixed: the previous condition `... or len(not_joint_nodes) == 0`
        # looped forever when no node had instances.)
        not_joint_nodes = []
        node_index = 0
        parent_name = None
        while node_index == 0 or node_index < len(not_joint_nodes):
            if len(not_joint_nodes) > 0:
                parent_name = not_joint_nodes[node_index]
            for _node in scene.get_nodes():
                if _node.get_instances() or _node.get_name() == parent_name:
                    if not (_node.get_name() in not_joint_nodes):
                        not_joint_nodes.append(_node.get_name())
                    if not (_node.get_parent() in not_joint_nodes):
                        not_joint_nodes.append(_node.get_parent())
            node_index += 1

        for node in scene.get_nodes():
            self.create_node(visual_scene, node, scene, not_joint_nodes)

        collada_scene = SubElement(self.dae.root, 'scene')
        SubElement(collada_scene, 'instance_visual_scene',
                   url='#3dConverterScene',
                   name='3d-Converter Scene')

    def create_node(self, visual_scene, node: Node, scene: Scene, not_joint_nodes):
        """Write one <node>, its instances/bind materials, transform and animation."""
        parent_name = node.get_parent()
        parent = visual_scene
        if parent_name != '':
            parent = visual_scene.find(f'.//*[@id="{parent_name}"]')
            if parent is None:
                parent = visual_scene

        node_name = node.get_name()
        collada_node = SubElement(parent, 'node', id=node.get_name())

        for instance in node.get_instances():
            bind_material = None
            instance_type = instance.get_type()

            if instance_type == 'CONT':
                instance_controller = SubElement(collada_node, 'instance_controller',
                                                 url=f'#{instance.get_name()}-cont')
                bind_material = SubElement(instance_controller, 'bind_material')
            elif instance_type == 'GEOM':
                instance_controller = SubElement(collada_node, 'instance_geometry', url=f'#{instance.get_name()}-geom')
                bind_material = SubElement(instance_controller, 'bind_material')

            if instance_type in ['GEOM', 'CONT']:
                technique_common = SubElement(bind_material, 'technique_common')

                for bind in instance.get_binds():
                    SubElement(technique_common, 'instance_material',
                               symbol=bind.get_symbol(),
                               target=f'#{bind.get_target()}')

        # Was a break-less for/else, whose else-branch always runs; flattened.
        if not (node.get_name() in not_joint_nodes):
            collada_node.attrib['type'] = 'JOINT'

        time_input = []
        matrix_output = []

        for frame_index in range(len(node.get_frames())):
            frame = node.get_frames()[frame_index]
            frame_id = frame.get_id()

            matrix = Matrix4x4(size=(4, 4))

            time_input.append(str(frame_id / scene.get_frame_rate()))

            matrix.put_rotation(frame.get_rotation())
            matrix.put_position(frame.get_position())
            matrix.put_scale(frame.get_scale())

            matrix = matrix.translation_matrix @ matrix.rotation_matrix @ matrix.scale_matrix
            matrix_values = []
            for row in matrix.matrix:
                for column in row:
                    matrix_values.append(str(column))

            # Frame 0 doubles as the node's static transform.
            if frame_index == 0:
                SubElement(collada_node, 'matrix', sid='transform').text = ' '.join(matrix_values)

            matrix_output.append(' '.join(matrix_values))

        if len(node.get_frames()) > 1:
            self.create_animation(node_name, node.get_frames(), matrix_output, time_input)
from xml.etree.ElementTree import *
from .collada import NAMESPACE
from ..universal import Node, Scene, Geometry
from ...interfaces import ParserInterface
from ...utilities import remove_suffix
from ...utilities.matrix.matrix4x4 import Matrix4x4
NAMESPACES = {
'collada': NAMESPACE
}
class Parser(ParserInterface):
    """Parses a COLLADA (.dae) document into the universal ``Scene`` model."""

    def __init__(self, file_data):
        """Parse the XML document and locate its top-level libraries.

        :param file_data: raw COLLADA XML (string or bytes).
        """
        self.library_materials = None
        self.library_effects = None
        self.library_geometries = None
        self.library_controllers = None
        self.instance_scene = None
        self.library_scenes = None
        self.scene = Scene()
        root = fromstring(file_data)
        self.find_libraries(root)
        self.instance_scene = root.find('./collada:scene', NAMESPACES).find('collada:instance_visual_scene',
                                                                            NAMESPACES)

    def find_libraries(self, root):
        """Cache the document's <library_*> elements on the instance."""
        self.library_materials = root.find('./collada:library_materials', NAMESPACES)
        if self.library_materials is None:
            # An empty list makes the iteration in parse_materials() a no-op.
            self.library_materials = []
        self.library_effects = root.find('./collada:library_effects', NAMESPACES)
        self.library_geometries = root.find('./collada:library_geometries', NAMESPACES)
        self.library_controllers = root.find('./collada:library_controllers', NAMESPACES)
        self.library_scenes = root.find('./collada:library_visual_scenes', NAMESPACES)

    def parse(self):
        """Build the scene: materials first, then the node tree, then geometry."""
        self.parse_materials()
        scene_url = self.instance_scene.attrib['url'][1:]  # drop the leading '#'
        scene = self.library_scenes.find(f'collada:visual_scene[@id="{scene_url}"]', NAMESPACES)
        self.parse_node(scene.findall('collada:node', NAMESPACES))
        self.parse_nodes()

    def parse_materials(self):
        """Register one scene material per <material> element.

        NOTE: extraction of emission/ambient/diffuse colours and textures
        from the effect was prototyped here but is disabled; a fixed
        default effect is emitted for every material instead.
        """
        for material in self.library_materials:
            material_name = material.attrib['name']
            instance_effect = material.find('collada:instance_effect', NAMESPACES)
            if instance_effect is not None:
                effect_url = instance_effect.attrib['url'][1:]
                effect = self.library_effects.find(f'collada:effect[@id="{effect_url}"]', NAMESPACES)
                if effect is not None:
                    material_data = {
                        'name': material_name,
                        'shader': 'shader/uber.vsh',
                        'effect': {
                            'ambient': [0, 0, 0, 255],
                            'diffuse': '.',
                            'specular': '.',
                            'colorize': [255, 255, 255, 255],
                            'emission': [0, 0, 0, 255],
                            'lightmaps': {
                                'diffuse': 'sc3d/diffuse_lightmap.png',
                                'specular': 'sc3d/specular_lightmap.png'
                            },
                            'shader_define_flags': 3014
                        }
                    }
                    self.scene.add_material(material_data)

    def parse_node(self, xml_nodes: list, parent: str = None):
        """Recursively convert <node> elements into ``Node`` objects."""
        for xml_node in xml_nodes:
            if not ('name' in xml_node.attrib):
                xml_node.attrib['name'] = xml_node.attrib['id']
            node: Node = Node(
                name=xml_node.attrib['name'],
                parent=parent
            )
            instance_geometry = xml_node.findall('collada:instance_geometry', NAMESPACES)
            instance_controller = xml_node.findall('collada:instance_controller', NAMESPACES)
            # Pair each instance element with its own type.  The previous
            # implementation tested the *lists* (``if instance_geometry:``),
            # which mislabelled every controller instance as GEOM whenever a
            # node carried both kinds of instance.
            typed_instances = [(element, 'GEOM') for element in instance_geometry]
            typed_instances += [(element, 'CONT') for element in instance_controller]
            for xml_instance, instance_type in typed_instances:
                instance = Node.Instance(name=xml_instance.attrib['url'][1:], instance_type=instance_type)
                bind_material = xml_instance.find('collada:bind_material', NAMESPACES)
                if bind_material is not None:  # instances without material bindings are legal
                    technique_common = bind_material[0]
                    for instance_material in technique_common:
                        instance.add_bind(instance_material.attrib['symbol'],
                                          instance_material.attrib['target'][1:])
                node.add_instance(instance)
            xml_matrix = xml_node.findall('collada:matrix', NAMESPACES)
            if xml_matrix:
                # 16 floats, grouped into a row-major 4x4 matrix.
                matrix = xml_matrix[0].text.split()
                matrix = [[float(value) for value in matrix[x:x + 4]] for x in range(0, len(matrix), 4)]
                matrix = Matrix4x4(matrix=matrix)
                scale = matrix.get_scale()
                position = matrix.get_position()
                rotation = matrix.get_rotation()
                node.add_frame(Node.Frame(0, position, scale, rotation))
            self.scene.add_node(node)
            self.parse_node(xml_node.findall('collada:node', NAMESPACES), node.get_name())

    def parse_nodes(self):
        """Resolve each node instance to its geometry and (optional) skin."""
        nodes = self.scene.get_nodes()
        for node_index in range(len(nodes)):
            node = nodes[node_index]
            for instance in node.get_instances():
                controller = None
                collada_geometry = None
                if instance.get_type() == 'CONT':
                    controller = self.library_controllers.find(
                        f'collada:controller[@id="{instance.get_name()}"]', NAMESPACES)
                    geometry_url = controller[0].attrib['source'][1:]
                    collada_geometry = self.library_geometries.find(
                        f'collada:geometry[@id="{geometry_url}"]', NAMESPACES)
                elif instance.get_type() == 'GEOM':
                    collada_geometry = self.library_geometries.find(
                        f'collada:geometry[@id="{instance.get_name()}"]', NAMESPACES)
                if collada_geometry is None:
                    # Unknown instance type or a dangling reference; the old
                    # code crashed here on attribute access.
                    continue
                if not ('name' in collada_geometry.attrib):
                    collada_geometry.attrib['name'] = collada_geometry.attrib['id']
                # Normalise the instance name: exporters commonly append
                # these suffixes to controller/geometry ids.
                instance._name = collada_geometry.attrib['name']
                for suffix in ('-skin', '-cont'):
                    instance._name = remove_suffix(instance.get_name(), suffix)
                for suffix in ('-mesh', '-geom'):
                    instance._name = remove_suffix(instance.get_name(), suffix)
                geometry = self.parse_geometry(collada_geometry)
                if controller is not None:
                    self.parse_controller(controller, geometry)

    def parse_controller(self, collada_controller, geometry: Geometry):
        """Attach skinning data (joints, bind matrices, weights) to *geometry*."""
        skin = collada_controller[0]
        bind_shape_matrix = skin.find('collada:bind_shape_matrix', NAMESPACES).text
        geometry.set_controller_bind_matrix(list(map(float, bind_shape_matrix.split())))
        joints = skin.find('collada:joints', NAMESPACES)
        joint_inputs = joints.findall('collada:input', NAMESPACES)
        for _input in joint_inputs:
            source_url = _input.attrib['source']
            source = skin.find(f'collada:source[@id="{source_url[1:]}"]', NAMESPACES)
            accessor = source.find('collada:technique_common/collada:accessor', NAMESPACES)
            accessor_stride = int(accessor.attrib['stride'])
            accessor_source_url = accessor.attrib['source']
            accessor_source = source.find(f'collada:*[@id="{accessor_source_url[1:]}"]', NAMESPACES)
            for param in accessor.findall('collada:param', NAMESPACES):
                param_name = param.attrib['name']
                source_data = accessor_source.text.split()
                if param_name == 'JOINT':
                    # One joint per name; the matrices are filled in below.
                    for name in source_data:
                        geometry.add_joint(Geometry.Joint(name, None))
                if param_name == 'TRANSFORM':
                    # Each joint gets `stride` floats of matrix data.
                    for x in range(int(accessor_source.attrib['count']) // int(accessor_stride)):
                        matrix = [float(value)
                                  for value in source_data[x * accessor_stride:(x + 1) * accessor_stride]]
                        geometry.get_joints()[x].set_matrix(matrix)
        vertex_weights = skin.find('collada:vertex_weights', NAMESPACES)
        for _input in vertex_weights.findall('collada:input', NAMESPACES):
            semantic = _input.attrib['semantic']
            source_url = _input.attrib['source']
            source = skin.find(f'collada:source[@id="{source_url[1:]}"]', NAMESPACES)
            if semantic == 'WEIGHT':
                accessor = source.find('collada:technique_common/collada:accessor', NAMESPACES)
                accessor_source_url = accessor.attrib['source']
                accessor_source = source.find(f'collada:*[@id="{accessor_source_url[1:]}"]', NAMESPACES)
                weights = None
                for param in accessor.findall('collada:param', NAMESPACES):
                    if param.attrib['name'] == 'WEIGHT':
                        weights = [float(x) for x in accessor_source.text.split()]
                        break
                if weights is None:
                    continue
                vcount = vertex_weights.find('collada:vcount', NAMESPACES).text
                v = vertex_weights.find('collada:v', NAMESPACES).text
                v = map(int, v.split())
                for count in map(int, vcount.split()):
                    # Each vertex contributes `count` (joint, weight) index pairs.
                    for i in range(count):
                        joint_index = next(v)
                        strength_index = next(v)
                        geometry.add_weight(Geometry.Weight(joint_index, weights[strength_index]))
                    # Pad up to four influences per vertex.
                    # NOTE(review): vertices with more than four influences are
                    # passed through unpadded -- confirm downstream handling.
                    while count < 4:
                        geometry.add_weight(Geometry.Weight(0, 0))
                        count += 1
                # Only the first WEIGHT input is consumed.
                break

    def parse_geometry(self, collada_geometry) -> Geometry:
        """Convert a <geometry> element to a ``Geometry``, register and return it."""
        name = collada_geometry.attrib['name']
        for suffix in ('-mesh', '-geom'):
            name = remove_suffix(name, suffix)
        geometry = Geometry(name=name, group='GEO')
        mesh = collada_geometry[0]
        # Faces may be stored as <triangles> or as <polylist>.
        triangles = mesh.findall('collada:triangles', NAMESPACES)
        if not triangles:
            triangles = mesh.findall('collada:polylist', NAMESPACES)
        inputs = triangles[0].findall('collada:input', NAMESPACES)
        for _input in inputs:
            semantic = _input.attrib['semantic']
            source_link = _input.attrib['source'][1:]
            source = mesh.find(f'*[@id="{source_link}"]')
            if semantic == 'VERTEX':
                # VERTEX is an indirection to the real POSITION source.
                vertices_input = source[0]
                semantic = vertices_input.attrib['semantic']
                source_link = vertices_input.attrib['source'][1:]
                source = mesh.find(f'*[@id="{source_link}"]')
            float_array = source.find('collada:float_array', NAMESPACES)
            accessor = source.find('collada:technique_common/collada:accessor', NAMESPACES)
            points_temp = [float(floating) for floating in float_array.text.split()]
            # Normalise by the largest absolute component (never below 1).
            scale = max(max(points_temp), abs(min(points_temp)))
            if scale < 1:
                scale = 1
            if semantic == 'TEXCOORD':
                # Flip the V coordinate (1 - v).
                points_temp[1::2] = [1 - x for x in points_temp[1::2]]
            points_temp = [value / scale for value in points_temp]
            # Group the flat float list by the accessor's component count.
            points = [points_temp[x: x + len(accessor)]
                      for x in range(0, len(points_temp), len(accessor))]
            geometry.add_vertex(Geometry.Vertex(
                name='',
                vertex_type=semantic,
                vertex_index=len(geometry.get_vertices()),
                vertex_scale=scale,
                points=points
            ))
        for triangle in triangles:
            triangles_material = triangle.attrib['material']
            p = triangle.find('collada:p', NAMESPACES)
            indices = [int(integer) for integer in p.text.split()]
            # Regroup the flat index stream: one polygon is three points,
            # each point holding one index triple per input stream.
            polygons = [
                [
                    indices[polygon_index + point_index:polygon_index + point_index + 3]
                    for point_index in range(0, len(inputs) * 3, 3)
                ] for polygon_index in range(0, len(indices), len(inputs) * 3)
            ]
            geometry.add_material(Geometry.Material(name=triangles_material, triangles=polygons))
        self.scene.add_geometry(geometry)
        return geometry
from models_converter.formats.universal import Scene
from models_converter.interfaces import WriterInterface
class Writer(WriterInterface):
    """Serialises a universal ``Scene`` into Wavefront OBJ text.

    The accumulated OBJ source is kept in ``self.writen`` (sic); callers
    read that attribute after :meth:`write` returns.
    """

    def __init__(self):
        # Accumulated OBJ text; grows with every write() call.
        self.writen = ''
        # Running totals of vertices emitted so far across all geometries;
        # copied into ``vertices_offsets`` at the start of each geometry.
        self.temp_vertices_offsets = {
            'POSITION': 0,
            'TEXCOORD': 0,
            'NORMAL': 0
        }
        # Offsets added to the current geometry's face indices (OBJ indices
        # are global across the file and 1-based).
        self.vertices_offsets = {
            'POSITION': 0,
            'TEXCOORD': 0,
            'NORMAL': 0
        }

    def write(self, scene: Scene):
        """Append every geometry of *scene* to ``self.writen`` as OBJ text."""
        for geometry in scene.get_geometries():
            # Snapshot the running totals: this geometry's faces index its
            # vertices relative to everything written before it.
            for key in self.vertices_offsets.keys():
                self.vertices_offsets[key] = self.temp_vertices_offsets[key]
            prefix = ''
            for vertex in geometry.get_vertices():
                # OBJ statement keyword for this vertex stream.
                if vertex.get_type() == 'POSITION':
                    prefix = 'v '
                elif vertex.get_type() == 'NORMAL':
                    prefix = 'vn '
                elif vertex.get_type() == 'TEXCOORD':
                    prefix = 'vt '
                self.temp_vertices_offsets[vertex.get_type()] += len(vertex.get_points())
                for triangle in vertex.get_points():
                    temp_string = prefix
                    for point in triangle:
                        # De-normalise: stored points were divided by the scale.
                        temp_string += str(point * vertex.get_scale()) + ' '
                    self.writen += f'{temp_string}\n'
                self.writen += '\n\n'
            for material in geometry.get_materials():
                # One OBJ object per material, named "<geometry>|<material>".
                self.writen += f'o {geometry.get_name()}|{material.get_name()}\n\n'
                for triangle in material.get_triangles():
                    temp_string = 'f '
                    for point in triangle:
                        # NOTE(review): point[2] pairs with the TEXCOORD offset
                        # and point[1] with NORMAL, mirroring the companion OBJ
                        # parser's ordering; the naming looks swapped relative
                        # to the usual v/vt/vn convention -- verify before
                        # changing either side.
                        temp_list = [
                            str(point[0] + self.vertices_offsets['POSITION'] + 1),  # POSITION
                            str(point[2] + self.vertices_offsets['TEXCOORD'] + 1),  # TEXCOORD
                            str(point[1] + self.vertices_offsets['NORMAL'] + 1)  # NORMAL
                        ]
                        temp_string += '/'.join(temp_list) + ' '
                    self.writen += f'{temp_string}\n'
                self.writen += '\n\n'
from models_converter.formats.universal import Scene, Node, Geometry
from models_converter.interfaces import ParserInterface
from models_converter.utilities.math import Vector3, Quaternion
class Parser(ParserInterface):
    """Parses Wavefront OBJ text into the universal ``Scene`` model.

    Counterpart of the OBJ writer: faces must be triangles in ``v/vt/vn``
    form, and object names may encode a material as "<geometry>|<material>".
    """

    def __init__(self, file_data: "bytes | str"):
        # Accept raw bytes as produced by binary file reads.
        if type(file_data) is bytes:
            file_data = file_data.decode()
        self.scene = Scene()
        self.lines = file_data.split('\n')
        # *_temp collect raw floats as parsed; the non-temp lists hold the
        # normalised (scale-divided) points grouped per vertex.
        self.position_temp, self.position = [], []
        self.normals_temp, self.normals = [], []
        self.texcoord_temp, self.texcoord = [], []

    def parse(self):
        """Walk the OBJ lines once; a geometry is flushed whenever a
        non-face line follows a collected face list.
        """
        triangles = []
        geometry_name = None
        material = 'character_mat'  # fallback material when none is encoded
        position_scale, normals_scale, texcoord_scale = 1, 1, 1
        # OBJ face indices are global across all objects; these offsets
        # rebase them to be local to the current geometry.
        vertices_offsets = {
            'POSITION': 0,
            'TEXCOORD': 0,
            'NORMAL': 0
        }
        # All object names up front (text before the '|' separator).
        names = [line[2:].split('|')[0]
                 for line in list(filter(lambda line: line.startswith('o '), self.lines))]
        for line_index in range(len(self.lines)):
            line = self.lines[line_index]
            items = line.split()[1:]
            if line.startswith('v '):  # POSITION
                for item in items:
                    self.position_temp.append(float(item))
            elif line.startswith('vn '):  # NORMAL
                for item in items:
                    self.normals_temp.append(float(item))
            elif line.startswith('vt '):  # TEXCOORD
                # Drop an optional third (w) texture coordinate.
                if len(items) > 2:
                    items = items[:-1]
                for item in items:
                    self.texcoord_temp.append(float(item))
            elif line.startswith('f '):
                temp_list = []
                if len(items) > 3:
                    raise ValueError('It is necessary to triangulate the model')
                for item in items:
                    second_temp_list = []
                    if len(item.split('/')) == 2:
                        raise ValueError('Model have not normals or texture')
                    elif len(item.split('/')) == 1:
                        raise ValueError('Model have not normals and texture')
                    for x in item.split('/'):
                        second_temp_list.append(int(x) - 1)  # OBJ is 1-based
                    # NOTE(review): the middle slot subtracts the TEXCOORD
                    # offset from index [2] and the NORMAL offset from [1];
                    # this mirrors the companion writer's ordering but the
                    # naming looks swapped -- verify before changing it.
                    temp_list.append([second_temp_list[0] - vertices_offsets['POSITION'],
                                      second_temp_list[2] - vertices_offsets['TEXCOORD'],
                                      second_temp_list[1] - vertices_offsets['NORMAL']])
                triangles.append(temp_list)
            elif line.startswith('o '):
                geometry_name = items[0]
                if '|' in items[0]:
                    geometry_name, material = items[0].split('|')
            # Normalise any pending raw vertex data into scaled points.
            if self.position_temp:
                self.position = []
                position_scale = self._get_vertex_scale(self.position_temp)
                for x in range(0, len(self.position_temp), 3):
                    self.position.append([vertex / position_scale for vertex in self.position_temp[x: x + 3]])
            if self.normals_temp:
                self.normals = []
                normals_scale = self._get_vertex_scale(self.normals_temp)
                for x in range(0, len(self.normals_temp), 3):
                    self.normals.append([vertex / normals_scale for vertex in self.normals_temp[x: x + 3]])
            if self.texcoord_temp:
                self.texcoord = []
                texcoord_scale = self._get_vertex_scale(self.texcoord_temp)
                for x in range(0, len(self.texcoord_temp), 2):
                    self.texcoord.append([vertex / texcoord_scale for vertex in self.texcoord_temp[x: x + 2]])
            # Flush the current object once a non-face line appears and all
            # three vertex streams plus at least one face are available.
            if not line.startswith('f ') and triangles and geometry_name and \
                    self.position and self.normals and self.texcoord:
                self.position_temp = []
                self.normals_temp = []
                self.texcoord_temp = []
                # Advance the global offsets only when the upcoming object
                # name differs (several materials may share one geometry).
                # NOTE(review): TEXCOORD advances by len(normals) and NORMAL
                # by len(texcoord) -- consistent with the index swap noted
                # above, but worth confirming.
                if len(names) > len(self.scene.get_geometries()) + 1 and \
                        names[len(self.scene.get_geometries()) + 1] != geometry_name:
                    vertices_offsets['POSITION'] += len(self.position)
                    vertices_offsets['TEXCOORD'] += len(self.normals)
                    vertices_offsets['NORMAL'] += len(self.texcoord)
                # Create the geometry only for the first material group.
                if not (self.scene.get_geometries() and self.scene.get_geometries()[-1].get_name() == geometry_name):
                    geometry = Geometry(name=geometry_name, group='GEO')
                    geometry.add_vertex(Geometry.Vertex(
                        name='position_0',
                        vertex_type='POSITION',
                        vertex_index=0,
                        vertex_scale=position_scale,
                        points=self.position
                    ))
                    geometry.add_vertex(Geometry.Vertex(
                        name='normal_0',
                        vertex_type='NORMAL',
                        vertex_index=1,
                        vertex_scale=normals_scale,
                        points=self.normals
                    ))
                    geometry.add_vertex(Geometry.Vertex(
                        name='texcoord_0',
                        vertex_type='TEXCOORD',
                        vertex_index=2,
                        vertex_scale=texcoord_scale,
                        points=self.texcoord
                    ))
                    self.scene.add_geometry(geometry)
                self.scene.get_geometries()[-1].add_material(
                    Geometry.Material(material, triangles)
                )
                material = 'character_mat'
                triangles = []
        # One scene node (with an identity frame) per parsed geometry.
        for geometry in self.scene.get_geometries():
            node = Node(name=geometry.get_name(), parent='')
            instance = Node.Instance(name=geometry.get_name(), instance_type='GEOM')
            for material in geometry.get_materials():
                instance.add_bind(material.get_name(), material.get_name())
            node.add_instance(instance)
            node.add_frame(Node.Frame(0, Vector3(), Vector3(1, 1, 1), Quaternion()))
            self.scene.add_node(node)

    @staticmethod
    def _get_vertex_scale(vertex_data: list):
        # Largest absolute component of the data, clamped to at least 1;
        # used to normalise the raw coordinates.
        vertex_scale = max(max(vertex_data), abs(min(vertex_data)))
        if vertex_scale < 1:
            vertex_scale = 1
        return vertex_scale
import pygame
from math import radians, sin, cos
class CameraOrtho:
    """An orthographic camera that projects 3D meshes onto a 2D surface."""

    def __init__(self, angle=(0, 0), shift=(0, 0), size=5):
        """Create the camera.

        :param angle: Camera angle as (latitude, longitude) in degrees.
        :type angle: Tuple[float, float]
        :param shift: Camera shift as (x, y).
        :type shift: Tuple[float, float]
        :param size: Orthographic size (world units spanned horizontally).
        :type size: float
        """
        self.angle = angle
        self.shift = shift
        self.size = size

    def render(self, mesh, resolution, bg_col=(0, 0, 0, 0)):
        """Render *mesh* onto a new pygame surface.

        :param mesh: Mesh to render.
        :type mesh: renderer.Mesh
        :param resolution: Output size as (width, height); the vertical
            orthographic extent is derived from the aspect ratio.
        :type resolution: Tuple[int, int]
        :param bg_col: Background colour as (R, G, B, [A]).
        :type bg_col: Tuple[int]
        """
        canvas = pygame.Surface(resolution, pygame.SRCALPHA)
        canvas.fill(bg_col)
        for colour, triangle in mesh.setup_render():
            outline = [self.project(point, resolution) for point in triangle]
            pygame.draw.polygon(canvas, colour, outline)
        return canvas

    def project(self, vert, res):
        """Project one world-space vertex to pixel coordinates (x, y)."""
        x, y, z = vert
        lat = radians(self.angle[0])
        lon = radians(self.angle[1])
        ortho_w = self.size
        ortho_h = self.size / res[0] * res[1]
        # Horizontal screen axis: rotation about the vertical axis only.
        px_x = x * cos(lon) + y * sin(lon)
        px_x *= res[0] / ortho_w
        px_x += res[0] / 2
        # Vertical screen axis: height combined with the tilted ground plane.
        px_y = z * sin(lat) - x * sin(lon) * cos(lat) + y * cos(lon) * cos(lat)
        px_y *= res[1] / ortho_h
        px_y += res[1] / 2
        return (px_x, px_y)
class Mesh:
    """A polygon mesh with one flat colour per face."""

    def __init__(self, verts, faces, colors):
        """Create the mesh; see :meth:`set_mesh` for the argument format."""
        self.set_mesh(verts, faces, colors)

    def set_mesh(self, verts, faces, colors):
        """Replace the mesh data.

        :param verts: Sequence of (x, y, z) locations.
        :type verts: Tuple[Tuple[float, float, float]]
        :param faces: Sequence of per-face vertex index tuples.
        :type faces: Tuple[Tuple[int]]
        :param colors: One (r, g, b) colour per face.
        :type colors: Tuple[Tuple[int, int, int]]
        """
        self.verts = verts
        self.colors = colors
        # Resolve indices to coordinates once, up front.
        self.faces = [[verts[index] for index in face] for face in faces]

    def setup_render(self):
        """Return (colour, triangle) pairs for the renderer; faces with more
        than three vertices are fan-triangulated around their first vertex.
        This should only be called by the renderer.
        """
        render_list = []
        for face_index, face in enumerate(self.faces):
            face_colour = self.colors[face_index]
            if len(face) == 3:
                render_list.append((face_colour, face))
                continue
            for pivot in range(1, len(face) - 1):
                render_list.append((face_colour, (face[0], face[pivot], face[pivot + 1])))
        return render_list
# 3D Video Converter
A simple FFMPEG-based script for converting either two separate stereo videos or an existing 3D video into a wide range of 3D video formats.
## Installation
### Install `3d-video-converter`
From PyPI:
```bash
pip install 3d-video-converter
```
Or from the source on GitHub:
```bash
pip install "3d-video-converter @ git+https://github.com/evoth/3d-video-converter"
```
The package will be installed with the module name `video_converter_3d`.
### Install FFmpeg
This package depends on [ffmpeg-python](https://github.com/kkroening/ffmpeg-python), which means that [FFmpeg](https://ffmpeg.org/) must be installed and accessible via the `$PATH` environment variable. Please follow appropriate installation instructions for your platform.
To check if FFmpeg is installed, run the `ffmpeg` command from the terminal. If it is installed correctly, you should see version and build information.
## Usage examples
Convert a full-width parallel view video to full color red/cyan anaglyph:
```python
from video_converter_3d import convert_3d
convert_3d("video_parallel.mp4", "sbsl", "video_anaglyph.mp4", "arcc")
```
Combine two separate stereo videos into a full-width parallel view video, only keeping audio from the left video:
```python
from video_converter_3d import convert_2d_to_3d
convert_2d_to_3d(
"video_left.mp4",
"video_right.mp4",
True,
False,
"video_parallel.mp4",
"sbsl"
)
```
| 3d-video-converter | /3d-video-converter-0.0.4.tar.gz/3d-video-converter-0.0.4/README.md | README.md |
from typing import Any, Dict, Optional

import ffmpeg
from cv2 import CAP_PROP_FPS, VideoCapture
def convert_3d(
    in_video: str,
    in_type: str,
    out_video: str,
    out_type: str,
    out_ffmpeg_options: Optional[Dict[str, Any]] = None,
) -> None:
    """
    Given a 3D video and its type of 3D, converts to a 3D video of a different
    type (see https://ffmpeg.org/ffmpeg-filters.html#stereo3d for available
    input and output type strings). For example, to convert a full-width
    parallel view video to full color red/cyan anaglyph, the `in_type` would be
    `"sbsl"` and the `out_type` would be `"arcc"`.

    Optionally, export settings can be adjusted via `out_ffmpeg_options`, which
    are passed to the ffmpeg-python output function; when omitted, the default
    `{"c:v": "libx264", "crf": 18}` is used.
    """
    # A None default (instead of a dict literal) avoids the shared
    # mutable-default pitfall; copying a caller-supplied dict guarantees it
    # is never mutated by this function.
    if out_ffmpeg_options is None:
        out_ffmpeg_options = {"c:v": "libx264", "crf": 18}
    else:
        out_ffmpeg_options = dict(out_ffmpeg_options)
    stream = ffmpeg.input(in_video)
    audio = stream.audio
    if in_type != out_type:
        # "in" is a Python keyword, so the filter arguments go through a dict.
        filter_options = {"in": in_type, "out": out_type}
        stream = ffmpeg.filter(stream, "stereo3d", **filter_options)
    stream = ffmpeg.output(stream, audio, out_video, **out_ffmpeg_options)
    ffmpeg.run(stream)
def convert_2d_to_3d(
    in_video_left: str,
    in_video_right: str,
    use_audio_left: bool,
    use_audio_right: bool,
    out_video: str,
    out_type: str,
    out_ffmpeg_options: Optional[Dict[str, Any]] = None,
    offset: float = 0,
    overwrite: bool = False,
) -> None:
    """
    Given two separate stereo videos of identical dimensions and constant
    framerates, combines into a 3D video of the specified type (see
    https://ffmpeg.org/ffmpeg-filters.html#stereo3d for available output type
    strings). For example, to combine the videos into a full-width parallel view
    video, the `out_type` would be `"sbsl"`.

    The audio from either or both videos may be used, depending on the value of
    `use_audio_left` and `use_audio_right`. If both are `True`, mixes audio down
    into mono or stereo, depending on the input files. However, if using audio
    from both videos, there may be slight echoing artifacts.

    Additionally, the offset between `in_video_left` and `in_video_right` can be
    specified by setting `offset` to the number of seconds `in_video_right` is
    delayed from `in_video_left` (or vice versa for a negative value).

    Optionally, export settings can be adjusted via `out_ffmpeg_options`, which
    are passed to the ffmpeg-python output function; when omitted, the default
    `{"c:v": "libx264", "crf": 18}` is used. Set `overwrite` to `True` to
    automatically overwrite files.
    """
    # Work on a private copy: the previous version used a mutable default
    # dict and then mutated it ("ac"/"fps_mode"/"r" below), so settings
    # leaked between calls and into the caller's dict.
    if out_ffmpeg_options is None:
        out_ffmpeg_options = {"c:v": "libx264", "crf": 18}
    else:
        out_ffmpeg_options = dict(out_ffmpeg_options)

    # Apply offset by trimming the beginning of whichever video starts earlier.
    in_options_left: Dict[str, Any] = {}
    if offset > 0:
        in_options_left["ss"] = offset
    stream_left = ffmpeg.input(in_video_left, **in_options_left)

    in_options_right: Dict[str, Any] = {}
    if offset < 0:
        in_options_right["ss"] = -1 * offset
    stream_right = ffmpeg.input(in_video_right, **in_options_right)

    # Create a parallel view intermediate (or cross view directly when the
    # output type is cross view, which puts the right eye first).
    if out_type == "sbsr":
        stream = ffmpeg.filter(
            [stream_right.video, stream_left.video], "hstack", inputs=2, shortest=1
        )
    else:
        stream = ffmpeg.filter(
            [stream_left.video, stream_right.video], "hstack", inputs=2, shortest=1
        )

    # Convert the side-by-side intermediate to the requested 3D type.
    if out_type not in ["sbsl", "sbsr"]:
        filter_options = {"in": "sbsl", "out": out_type}
        stream = ffmpeg.filter(stream, "stereo3d", **filter_options)

    # Process audio.
    audio = None
    if use_audio_left and use_audio_right:
        audio = ffmpeg.filter(
            [stream_left.audio, stream_right.audio], "amerge", inputs=2
        )
        out_ffmpeg_options["ac"] = 2  # mix the merged channels down to stereo
    elif use_audio_left:
        audio = stream_left.audio
    elif use_audio_right:
        audio = stream_right.audio

    # Force a constant output framerate matching the left input.
    cap = VideoCapture(in_video_left)
    fps = cap.get(CAP_PROP_FPS)
    cap.release()  # release the capture handle (previously leaked)

    # Configure output.
    out_ffmpeg_options["fps_mode"] = "cfr"
    out_ffmpeg_options["r"] = fps
    if audio is not None:
        stream = ffmpeg.output(stream, audio, out_video, **out_ffmpeg_options)
    else:
        stream = ffmpeg.output(stream, out_video, **out_ffmpeg_options)
    ffmpeg.run(stream, overwrite_output=overwrite)
3D Wallet Generator
===================
This project helps you design and export 3D-printable wallets, similar to paper wallets (but they won't die in a flood)
-----------------------------------------------------------------------------------------------------------------------
Everyone who's seriously serious about bitcoin has tried paper wallet
generators. While the idea is great, paper isn't a great medium out of
which to make something that stores significant value. This this in
mind, we set out to make a simple, easy-to-use software that can design
and export 3D-printable wallets, with a variety of configuration
options.
Dependencies
------------
- Python3: this project is designed for Python3, not Python2
- PyBitcoin, ``sudo pip3 install bitcoin`` **(no manual installation required)**
- PyQRCode, ``sudo pip3 install pyqrcode`` **(no manual installation required)**
- OpenSCAD 2015 (or higher), just install from their website, and the
program should find it automatically (submit an issue if it doesn't) - **(manual installation required)**
Features
--------
- Supports a variety of configuration and size options
- Exports wallets as STL
- Export keys as CSV-file for import into other software (for big
batches)
- Set the configuration and let it generate millions of **random**
wallets for you
- Support for other cryptocurrencies, including:
- Bitcoin
- Litecoin
- Dogecoin
- Any other currency (as long as you know the version bit for address generation)
Instructions
------------
1. Install pip
- Windows: download from their website
- Mac: install from MacPorts or Brew
- Linux (Ubuntu/Debian): ``sudo apt-get install python3-pip``
2. Install OpenSCAD
- `Download from their website <http://openscad.org/downloads.html>`_
- Make sure you are running their newest version (or at least OpenSCAD 2015)
- Contact us if you need help.
3. Install our package
- Try: ``sudo pip3 install 3d-wallet-generator``
- If it continues to fail, shoot us an email and we'll try to help.
4. Use our package
- Run ``3dwallet -h`` to see your options
- Try the default settings by running ``3dwallet`` - it will output five wallets, with the default settings, into a folder in your current directory.
- Play with the other settings and decide how your printer, CNC, etc. likes the different styles.
- Film it or take a picture, and give it to us! We'll add it to our collection!
We recommend you run the Linux version off of a LiveUSB for maximum
security (just as you would with a normal paper wallet).
Miscellaneous
-------------
- If you have any comments, questions, or feature requests, either
submit an issue or contact us at btcspry@bitforwarder.com
- We always accept donations at
   **1MF7hKShzq2iSV9ZZ9hEx6ATnHQpFtM7cF**. Please donate — this project
   took a lot of effort and we want to make sure it was worth it.
To Do / Features Coming Soon
----------------------------
- Add pictures
- Add option to import your own addresses/private keys
- Offset the white in the QR code (instead of just offsetting the
black)
- If you want any of these developed faster, send us a gift to our donation address above.
| 3d-wallet-generator | /3d-wallet-generator-0.2.0.tar.gz/3d-wallet-generator-0.2.0/README.rst | README.rst |
try:
import qr_tools as qrTools # Module for this project
except:
import gen_3dwallet.qr_tools as qrTools
try:
import TextGenerator as textGen # Module for this project
except:
import gen_3dwallet.TextGenerator as textGen
import bitcoin # sudo pip3 install bitcoin
import argparse
import time
import math
import sys
import os
import distutils.spawn
def parse_args(argv=None):
    """Parse the command-line options for the 3D wallet generator.

    :param argv: Optional list of argument strings (for testing); when None,
        ``sys.argv`` is used, preserving the original behaviour.
    :return: The parsed ``argparse.Namespace``.
    """
    parser = argparse.ArgumentParser(description='Generate an STL file of a 3D-printable bitcoin, litecoin, dogecoin, or other type of coin.', formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-ve', '--version', dest='versionByte', type=int, default=0, help='Version Bit of the address (for other altcoins).\nBitcoin: 0 (Default)\n Litecoin: 48\n Dogecoin: 30')
    parser.add_argument('-ct', '--coin-title', dest='coinTitle', type=str, default="Bitcoin", help='Title of the coin, used for design purposes \n(Default: Bitcoin)')
    parser.add_argument('-ls', '--layout-style', dest='layoutStyle', type=int, default=1, help="Layout style of the wallet.\n1) Address on the Front, Private Key on the Back (Default)\n2) Private Key Only\n3) Address Only (don't forget to export the Private Keys after)")
    # Help typo fixed: "legnth" -> "length".
    parser.add_argument('-wi', '--width', dest='walletWidth', type=float, default=54.0, help='The width of the wallet in mm. The length is calculated automatically. Default option is approximately standard credit card length and width. \n(Default: 54.0)')
    parser.add_argument('-he', '--height', dest='walletHeight', type=float, default=8.0, help='The height of the wallet in mm. \n(Default: 8)')
    # Help text corrected to match the actual default of -30 (it claimed -20).
    parser.add_argument('-bo', '--black-offset', dest='blackOffset', type=int, default=-30, help='The percentage of the height that the black part of the QR code, and the text, will be raised or lowered by.\nNegative number for lowered, positive for raised. Option must be greater than -90. \n(Default: -30)')
    parser.add_argument('-ec', '--qr-error-correction', dest='errorCorrection', type=str, default="M", help='The percentage of the QR codes that can be destroyed before they are irrecoverable\nL) 7 percent\nM) 15 percent (Default)\nQ) 25 percent\nH) 30 percent')
    # Help typo fixed: "coners" -> "corners".
    parser.add_argument('-dc', '--disable-round-corners', dest='roundCorners', action='store_false', help="Round the corners (four short edges) of the wallet. \n(Default: disabled)")
    parser.add_argument('-co', '--copies', dest='copies', type=int, default=5, help='The number of wallets to generate. These will all be unique and randomly-generate wallets (not copies). \n(Default: 5)')
    parser.add_argument('-sd', '--openscad-exe', dest='scadExe', type=str, default="openscad", help='The location and filename of the command line tools for OpenSCAD (leave as default if it is installed as a command [ie. Linux])\nIn most cases on Windows and Mac, the executable will be found automatically.')
    parser.add_argument('-o', '--stl-folder', dest='outputSTLFolder', type=str, default="./WalletsOut/", help='The output folder to export the STL files into\n(Default: ./WalletsOut/)')
    parser.add_argument('-oc', '--scad-folder', dest='outputSCADFolder', type=str, default='', help='The output folder to store the SCAD generation files in (optional, only used for debugging)\n(Default: disabled)')
    parser.add_argument('-ea', '--export-address-csv', dest='exportAddressCSV', type=str, default='', help='The output CSV file to export the address list to (optional)\n(Default: disabled)')
    parser.add_argument('-ep', '--export-privkey-csv', dest='exportPrivkeyCSV', type=str, default='', help='The output CSV file to export the private key list to (optional)\n(Default: disabled)')
    parser.add_argument('-eap', '--export-address-privkey-csv', dest='exportAPCSV', type=str, default='', help='The output CSV file to export the address and private key list to, in the format of "address,privkey" (optional)\n(Default: disabled)')
    parser.add_argument('-epa', '--export-privkey-address-csv', dest='exportPACSV', type=str, default='', help='The output CSV file to export the address and private key list to, in the format of "privkey,address" (optional)\n(Default: disabled)')
    return parser.parse_args(argv)
def main():
args = parse_args()
# Set DEBUG variable for testing purposes (changing styling)
# If true, prints the SCAD to the terminal and then breaks after first generation
DEBUG = False
# Generate the addresses
if args.copies < 1:
print("Please enter a valid number of copies (-co flag), and try again.")
sys.exit()
else: # Use an else statement here just in case we add the option to import a CSV file with the keys (generated somewhere else)
walletDataList = []
for i in range(args.copies):
thisData = {}
# Generate the addresses with keys
thisData["privateKey"] = bitcoin.main.random_key() # Secure: uses random library, time library and proprietary function
thisData["wif"] = bitcoin.encode_privkey(thisData["privateKey"], "wif", args.versionByte)
thisData["address"] = bitcoin.privkey_to_address(thisData["privateKey"], args.versionByte)
# Generate the QR codes
if args.errorCorrection.upper() not in ["L","M","Q","H"]:
print("Please select a valid QR Error Correction value (L, M, Q, or H).")
sys.exit()
thisData["wifQR"] = qrTools.getQRArray(thisData["wif"], args.errorCorrection.upper())
thisData["addressQR"] = qrTools.getQRArray(thisData["address"], args.errorCorrection.upper())
# Reverse them or else they appear backwards (unknown reason)
thisData["wifQR"] = list(reversed(thisData["wifQR"]))
thisData["addressQR"] = list(reversed(thisData["addressQR"]))
# Append ALL the wallet information, just in case we want to do something with it later
walletDataList.append(thisData)
# Validate other args and set some constants
walletWidth = args.walletWidth
walletHeight = args.walletHeight
if args.layoutStyle == 1 or args.layoutStyle == 2 or args.layoutStyle == 3:
walletLength = walletWidth*1.6 # Approximately the same ratio as a credit card
else:
print("Please choose a valid layout style option.")
sys.exit()
if args.blackOffset < -90.0:
print("Please ensure that --black-offset (-bo flag) is set correctly, and is greater than -90.")
sys.exit()
textDepth = (args.blackOffset/100) * walletHeight
# Check the openscad command
scadExe = args.scadExe
if args.scadExe == "openscad" and not distutils.spawn.find_executable("openscad"):
if os.path.isfile("/Applications/OpenSCAD.app/Contents/MacOS/OpenSCAD"):
print("Info: OpenSCAD found in Applications folder on Mac")
scadExe = "/Applications/OpenSCAD.app/Contents/MacOS/OpenSCAD"
elif os.path.isfile("%PROGRAMFILES%\OpenSCAD\openscad.exe"):
print("Info: OpenSCAD found in Program Files on Windows")
scadExe = "%PROGRAMFILES%\OpenSCAD\openscad.exe"
elif os.path.isfile("%PROGRAMFILES(x86)%\OpenSCAD\openscad.exe"):
print("Info: OpenSCAD found in Program Files (x86) on Windows")
scadExe = "%PROGRAMFILES(x86)%\OpenSCAD\openscad.exe"
if not distutils.spawn.find_executable(scadExe):
print("Please install OpenSCAD or specify the location of it with --openscad-exe.")
sys.exit()
# Set the master SCAD variable
masterSCAD = "// SCAD Code Generated By 3DGen.py - 3D Wallet Generator\n\n" # The beginning of the wallet are identical
scadOutputs = [] # Generated from loop for each wallet (different addresses)
# Include some modules at the beginning
masterSCAD += "// Import some modules\n"
masterSCAD += """
$fn=100;
module createMeniscus(h,radius)difference(){translate([radius/2+0.1,radius/2+0.1,0]){cube([radius+0.2,radius+0.1,h+0.2],center=true);}cylinder(h=h+0.2,r=radius,center=true);}
module roundCornersCube(x,y,z)translate([x/2,y/2,z/2]){difference(){r=((x+y)/2)*0.052;cube([x,y,z],center=true);translate([x/2-r,y/2-r]){rotate(0){createMeniscus(z,r);}}translate([-x/2+r,y/2-r]){rotate(90){createMeniscus(z,r);}}translate([-x/2+r,-y/2+r]){rotate(180){createMeniscus(z,r);}}translate([x/2-r,-y/2+r]){rotate(270){createMeniscus(z,r);}}}}
""" # The rounding corners modules for creating a rounded rectangle
masterSCAD += "\n"
# Draw the main prism
if args.roundCorners:
mainCube = "roundCornersCube(" + str(walletLength) + "," + str(walletWidth) + "," + str(walletHeight) + ");"
else:
mainCube = "cube([" + str(walletLength) + "," + str(walletWidth) + "," + str(walletHeight) + "]);"
mainCube += "\n\n"
# Init a variable to keep all the additive/subtractive parts
finalParts = []
# Init variables to keep the CSV output data in
addressOut = []
privkeyOut = []
APOut = []
PAOut = []
# Set a counter for naming the files
filenameCounter = 1
# Break into the loop for each wallet
for data in walletDataList:
# 'data' = wif, address, wifQR, addressQR
# Generate the texts
addressLine1 = data["address"][:math.ceil(len(data["address"])/2.0)]
addressLine2 = data["address"][math.ceil(len(data["address"])/2.0):]
wifLine1 = data["wif"][:17]
wifLine2 = data["wif"][17:34]
wifLine3 = data["wif"][34:]
addressLine1Dots = textGen.getArray(addressLine1)
addressLine2Dots = textGen.getArray(addressLine2)
privkeyLine1Dots = textGen.getArray(wifLine1)
privkeyLine2Dots = textGen.getArray(wifLine2)
privkeyLine3Dots = textGen.getArray(wifLine3)
bigTitle = textGen.getArray("3D " + args.coinTitle + " Wallet")
addressTitle = textGen.getArray("Address")
privkeyTitle = textGen.getArray("Private Key")
# Create the big title union so that it can be sized and moved
bigTitleUnion = ""
for rowIndex in range(len(bigTitle)):
row = bigTitle[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
bigTitleUnion += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
# Translate the title to where it goes
bigTitleFinal = "translate([(1/17)*length,(14/17)*width,0]){resize([(15/17)*length,0,0],auto=[true,true,false]){bigTitleUnion}}".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('bigTitleUnion',bigTitleUnion)
finalParts.append(bigTitleFinal+"\n\n")
if args.layoutStyle == 1:
# Need to copy it on to the backside as well - rotate then move it, and then create a union of the two titles (front and back)
bigTitle2 = "translate([length,0,height]){rotate(180,v=[0,1,0]){bigTitleFinal}}".replace('length',str(walletLength)).replace('height',str(walletHeight)).replace('bigTitleFinal',bigTitleFinal).replace('translateHeight',str(translateHeight))
finalParts.append(bigTitle2+"\n\n")
# Draw the word "Address" on the front, and draw on the actual address
if args.layoutStyle == 1 or args.layoutStyle == 3:
# Draw the address on the front
addressParts = []
# Create the address title union and size/move it
addressTitleUnion = "union(){"
for rowIndex in range(len(addressTitle)):
row = addressTitle[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
addressTitleUnion += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
addressTitleUnion += "}"
addressTitleFinal = "translate([(10/17)*length,(6/11)*width,0]){resize([0,(4/55)*width,0],auto=[true,true,false]){addressTitleUnion}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('addressTitleUnion',addressTitleUnion)
addressParts.append(addressTitleFinal)
# Create the first line of the address
addressLine1Union = "union(){"
for rowIndex in range(len(addressLine1Dots)):
row = addressLine1Dots[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
addressLine1Union += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
addressLine1Union += "}"
addressLine1Final = "translate([(8.2/17)*length,(5/11)*width,0]){resize([0,(3/55)*width,0],auto=[true,true,false]){addressLine1Union}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('addressLine1Union',addressLine1Union)
addressParts.append(addressLine1Final)
# Create the second line of the address
addressLine2Union = "union(){"
for rowIndex in range(len(addressLine2Dots)):
row = addressLine2Dots[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
addressLine2Union += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
addressLine2Union += "}"
addressLine2Final = "translate([(8.2/17)*length,(4.1/11)*width,0]){resize([0,(3/55)*width,0],auto=[true,true,false]){addressLine2Union}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('addressLine2Union',addressLine2Union)
addressParts.append(addressLine2Final)
# Create the QR code
addressQRUnion = "union(){"
for rowIndex in range(len(data["addressQR"])):
row = data["addressQR"][rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == 0:
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
addressQRUnion += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
addressQRUnion += "}"
addressQRFinal = "translate([(0.6/17)*length,(0.6/11)*width,0]){resize([0,(8/12)*width,0],auto=[true,true,false]){addressQRUnion}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('addressQRUnion',addressQRUnion)
addressParts.append(addressQRFinal)
finalParts.extend(addressParts)
# Draw all the things having to do with the private key
if args.layoutStyle == 1 or args.layoutStyle == 2:
privkeyParts = []
# Create the privkey title union and size/move it
privkeyTitleUnion = "union(){"
for rowIndex in range(len(privkeyTitle)):
row = privkeyTitle[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
privkeyTitleUnion += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
privkeyTitleUnion += "}"
privkeyTitleFinal = "translate([(8.7/17)*length,(7/11)*width,0]){resize([0,(4/55)*width,0],auto=[true,true,false]){privkeyTitleUnion}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('privkeyTitleUnion',privkeyTitleUnion)
privkeyParts.append(privkeyTitleFinal)
# Create the first line of the privkey
privkeyLine1Union = "union(){"
for rowIndex in range(len(privkeyLine1Dots)):
row = privkeyLine1Dots[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
privkeyLine1Union += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
privkeyLine1Union += "}"
privkeyLine1Final = "translate([(8.2/17)*length,(6/11)*width,0]){resize([0,(3/55)*width,0],auto=[true,true,false]){privkeyLine1Union}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('privkeyLine1Union',privkeyLine1Union)
privkeyParts.append(privkeyLine1Final)
# Create the second line of the privkey
privkeyLine2Union = "union(){"
for rowIndex in range(len(privkeyLine2Dots)):
row = privkeyLine2Dots[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
privkeyLine2Union += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
privkeyLine2Union += "}"
privkeyLine2Final = "translate([(8.2/17)*length,(5.1/11)*width,0]){resize([0,(3/55)*width,0],auto=[true,true,false]){privkeyLine2Union}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('privkeyLine2Union',privkeyLine2Union)
privkeyParts.append(privkeyLine2Final)
# Create the third line of the privkey
privkeyLine3Union = "union(){"
for rowIndex in range(len(privkeyLine3Dots)):
row = privkeyLine3Dots[rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == '1':
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
privkeyLine3Union += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
privkeyLine3Union += "}"
privkeyLine3Final = "translate([(8.2/17)*length,(4.2/11)*width,0]){resize([0,(3/55)*width,0],auto=[true,true,false]){privkeyLine3Union}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('privkeyLine3Union',privkeyLine3Union)
privkeyParts.append(privkeyLine3Final)
# Create the QR code
privkeyQRUnion = "union(){"
for rowIndex in range(len(data["wifQR"])):
row = data["wifQR"][rowIndex]
for colIndex in range(len(row)):
if row[colIndex] == 0:
translateHeight = walletHeight if textDepth>0 else walletHeight+textDepth
privkeyQRUnion += "translate([colIndex,rowIndex,translateHeight]){cube([1,1,textDepth]);}".replace('colIndex',str(colIndex)).replace('rowIndex',str(rowIndex)).replace('textDepth',str(abs(textDepth))).replace('translateHeight',str(translateHeight))
privkeyQRUnion += "}"
privkeyQRFinal = "translate([(0.6/17)*length,(0.6/11)*width,0]){resize([0,(8/12)*width,0],auto=[true,true,false]){privkeyQRUnion}}\n\n".replace('length',str(walletLength)).replace('width',str(walletWidth)).replace('privkeyQRUnion',privkeyQRUnion)
privkeyParts.append(privkeyQRFinal)
if args.layoutStyle == 2:
# Just add it all to the finalParts
finalParts.extend(privkeyParts)
elif args.layoutStyle == 1:
# Rotate it all and then add it to the finalParts
privkeyPartsNew = []
for part in privkeyParts:
privkeyPartsNew.append("translate([length,0,height]){rotate(180,v=[0,1,0]){part}}".replace('length',str(walletLength)).replace('height',str(walletHeight)).replace('part',part).replace('translateHeight',str(translateHeight)))
finalParts.extend(privkeyPartsNew)
# Put it all together
finalSCAD = masterSCAD
if textDepth < 0:
finalSCAD += "difference() {\n\n"
else:
finalSCAD += "union() {\n\n"
finalSCAD += mainCube
finalSCAD += "".join(finalParts)
finalSCAD += "}"
if DEBUG:
print(finalSCAD)
break
if args.outputSCADFolder:
try:
os.makedirs(args.outputSCADFolder)
except FileExistsError:
pass
scadOutFile = open(args.outputSCADFolder + '/wallet' + str(filenameCounter) + '.scad','w')
scadOutFile.write(finalSCAD)
scadOutFile.close()
# Log some info
print("Status: Done generating data for wallet #" + str(filenameCounter) + "...Starting generating STL file")
if args.outputSTLFolder:
try:
os.makedirs(args.outputSTLFolder)
except FileExistsError:
pass
scadOutFile = open('temp.scad','w')
scadOutFile.write(finalSCAD)
scadOutFile.close()
os.system(scadExe + " -o " + args.outputSTLFolder + "/wallet" + str(filenameCounter) + ".stl temp.scad")
try:
os.remove('temp.scad')
except:
pass
else:
print("Please provide a folder to output the STL files.")
# Update the CSV file variables
addressOut.append(data["address"])
privkeyOut.append(data["wif"])
APOut.append(data["address"] + "," + data["wif"])
PAOut.append(data["wif"] + "," + data["address"])
# Print some more stats
print("Status: Done generating STL file (" + str(round(filenameCounter/args.copies*100)) + "% done)")
filenameCounter += 1
# Export the CSV files
if args.exportAddressCSV:
csvFile = open(args.exportAddressCSV,'a')
csvFile.write(','.join(addressOut))
csvFile.close()
if args.exportPrivkeyCSV:
csvFile = open(args.exportPrivkeyCSV,'a')
csvFile.write(','.join(privkeyOut))
csvFile.close()
if args.exportAPCSV:
csvFile = open(args.exportAPCSV,'a')
csvFile.write('\n'.join(exportAPCSV))
csvFile.close()
if args.exportPACSV:
csvFile = open(args.exportPACSV,'a')
csvFile.write('\n'.join(exportPACSV))
csvFile.close() | 3d-wallet-generator | /3d-wallet-generator-0.2.0.tar.gz/3d-wallet-generator-0.2.0/gen_3dwallet/base.py | base.py |
# Character list matches up with the character table, which is decoded by the binary decoder
# (a character's index in `chars` is its row index in `characterTable`; covers printable ASCII 0x20-0x7D)
chars = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}"
# Character table defining 5x7 characters (adapted from table of dot matrix display codes)
# Each entry holds 7 integers, one per pixel row (top row first); each integer
# is a 5-bit mask of that row's pixels, decoded to a "0"/"1" string via decTable.
characterTable = [[ 0, 0, 0, 0, 0, 0, 0],
                  [ 4, 0, 4, 4, 4, 4, 4],
                  [ 0, 0, 0, 0,10,10,10],
                  [10,10,31,10,31,10,10],
                  [ 4,30, 5,14,20,15, 4],
                  [ 3,19, 8, 4, 2,25,24],
                  [13,18,21, 8,20,18,12],
                  [ 0, 0, 0, 0, 8, 4,12],
                  [ 2, 4, 8, 8, 8, 4, 2],
                  [ 8, 4, 2, 2, 2, 4, 8],
                  [ 0, 4,21,14,21, 4, 0],
                  [ 0, 4, 4,31, 4, 4, 0],
                  [ 8, 4,12, 0, 0, 0, 0],
                  [ 0, 0, 0,31, 0, 0, 0],
                  [12,12, 0, 0, 0, 0, 0],
                  [ 0,16, 8, 4, 2, 1, 0],
                  [14,17,25,21,19,17,14],
                  [14, 4, 4, 4, 4,12, 4],
                  [31, 8, 4, 2, 1,17,14],
                  [14,17, 1, 2, 4, 2,31],
                  [ 2, 2,31,18,10, 6, 2],
                  [14,17, 1, 1,30,16,31],
                  [14,17,17,30,16, 8, 6],
                  [ 8, 8, 8, 4, 2, 1,31],
                  [14,17,17,14,17,17,14],
                  [12, 2, 1,15,17,17,14],
                  [ 0,12,12, 0,12,12, 0],
                  [ 8, 4,12, 0,12,12, 0],
                  [ 2, 4, 8,16, 8, 4, 2],
                  [ 0, 0,31, 0,31, 0, 0],
                  [16, 8, 4, 2, 4, 8,16],
                  [ 4, 0, 4, 2, 1,17,14],
                  [14,21,21,13, 1,17,14],
                  [17,17,31,17,17,17,14],
                  [30,17,17,30,17,17,30],
                  [14,17,16,16,16,17,14],
                  [30,17,17,17,17,17,30],
                  [31,16,16,30,16,16,31],
                  [16,16,16,30,16,16,31],
                  [15,17,17,23,16,17,14],
                  [17,17,17,31,17,17,17],
                  [14, 4, 4, 4, 4, 4,14],
                  [12,18, 2, 2, 2, 2, 7],
                  [17,18,20,24,20,18,17],
                  [31,16,16,16,16,16,16],
                  [17,17,17,21,21,27,17],
                  [17,17,19,21,25,17,17],
                  [14,17,17,17,17,17,14],
                  [16,16,16,30,17,17,30],
                  [13,18,21,17,17,17,14],
                  [17,18,20,30,17,17,30],
                  [30, 1, 1,14,16,16,15],
                  [ 4, 4, 4, 4, 4, 4,31],
                  [14,17,17,17,17,17,17],
                  [ 4,10,17,17,17,17,17],
                  [10,21,21,21,17,17,17],
                  [17,17,10, 4,10,17,17],
                  [ 4, 4, 4,10,17,17,17],
                  [31,16, 8, 4, 2, 1,31],
                  [14, 8, 8, 8, 8, 8,14],
                  [ 0, 1, 2, 4, 8,16, 0],
                  [14, 2, 2, 2, 2, 2,14],
                  [ 0, 0, 0, 0,17,10, 4],
                  [31, 0, 0, 0, 0, 0, 0],
                  [ 0, 0, 0, 0, 2, 4, 8],
                  [15,17,15, 1,14, 0, 0],
                  [30,17,17,25,22,16,16],
                  [14,17,16,16,14, 0, 0],
                  [15,17,17,19,13, 1, 1],
                  [14,16,31,17,14, 0, 0],
                  [ 8, 8, 8,28, 8, 9, 6],
                  [14, 1,15,17,15, 0, 0],
                  [17,17,17,25,22,16,16],
                  [14, 4, 4, 4,12, 0, 4],
                  [12,18, 2, 2, 2, 6, 2],
                  [18,20,24,20,18,16,16],
                  [14, 4, 4, 4, 4, 4,12],
                  [17,17,21,21,26, 0, 0],
                  [17,17,17,25,22, 0, 0],
                  [14,17,17,17,14, 0, 0],
                  [16,16,30,17,30, 0, 0],
                  [ 1, 1,15,19,13, 0, 0],
                  [16,16,16,25,22, 0, 0],
                  [30, 1,14,16,15, 0, 0],
                  [ 6, 9, 8, 8,28, 8, 8],
                  [13,19,17,17,17, 0, 0],
                  [ 4,10,17,17,17, 0, 0],
                  [10,21,21,17,17, 0, 0],
                  [17,10, 4,10,17, 0, 0],
                  [14, 1,15,17,17, 0, 0],
                  [31, 8, 4, 2,31, 0, 0],
                  [ 2, 4, 4, 8, 4, 4, 2],
                  [ 4, 4, 4, 4, 4, 4, 4],
                  [ 8, 4, 4, 2, 4, 4, 8]]
# Binary decode table: decTable[v] is the 5-bit binary string for v (0..31),
# used to turn a row value from characterTable into a string of pixels.
# Generated instead of hand-typed — identical contents, no transcription risk.
decTable = [format(i, "05b") for i in range(32)]
def getArray(text):
    """Render `text` as a 7-row dot matrix of '0'/'1' pixel strings.

    Each character contributes its 5 font columns plus one blank spacer
    column; rows are returned top-to-bottom.
    """
    rows = [""] * 7  # one pixel string per row of the 5x7 font
    for character in text:
        rowCodes = characterTable[chars.index(character)]
        for rowNum, code in enumerate(rowCodes):
            # Decode this row's 5-bit value to pixels, then add the spacer column.
            rows[rowNum] += decTable[code] + "0"
    return rows
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
from RenderPy.Ray import Ray
# ---------------------
"""
Computation class stores all of the derived results that are computed once an intersection object has been obtained
It contains the following elements:
1. t: a scalar, the intersection is t units away from the origin of the ray
2. object: a class inheriting Shape, indicating the shape that has the intersection
3. point: a Tuple, the intersection point
4. eyev: a Tuple, the eye vector
5. normalv: a Tuple, the normal at the point
6. inside: a Bool, indicates whether the intersection is inside or outside the shape
7. reflectv: a Tuple, the reflected vector
8. n1: a float, a refractivity index
9. n2: a float, a refractivity index
Computation class contains the following functions:
__init__
__str__
schlick
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ComputationTest.py
--- OR ----
python3 -m nose -v ../test/ComputationTest.py
--- OR ----
python -m nose -v ../test/ComputationTest.py
---------------------------------------------------
"""
class Computation():
    """Pre-computes every quantity needed to shade a single intersection.

    Attributes: t (distance along the ray), shape (the intersected object),
    point (the hit point), eyev (vector toward the eye), normalv (surface
    normal at the point), inside (whether the hit is inside the shape),
    overPoint/underPoint (acne-avoidance offsets), reflectv (reflected ray
    direction), n1/n2 (refractive indices on either side of the hit).
    Tested by ../test/ComputationTest.py.
    """
    t = 0
    shape = None
    point = Tuple()
    eyev = Tuple()
    normalv = Tuple()
    inside = False
    # Defaults so __str__/schlick work even when xs does not contain the hit
    # (previously n1/n2 were left unset in that case -> AttributeError).
    n1 = 1
    n2 = 1

    def __init__(self, intersection: "Intersection", ray: "Ray", xs=()):
        """Build the computation for `intersection` of `ray`.

        xs is the full, t-sorted list of world intersections; it is only
        read (never mutated), and is used to derive n1/n2 for refraction.
        Tested by ../test/ComputationTest.py:test_init.
        """
        self.t = intersection.t
        self.shape = intersection.shape
        self.point = ray.position(self.t)
        self.eyev = ~ray.direction
        self.normalv = self.shape.normalAt(self.point, hit=intersection)
        if self.normalv.dot(self.eyev) < 0:
            # The normal points away from the eye: we are inside the shape.
            self.inside = True
            self.normalv = ~self.normalv
        # Nudge along the normal to avoid self-shadowing/refraction acne.
        self.overPoint = self.point + self.normalv * 0.00001
        self.underPoint = self.point - self.normalv * 0.00001
        # reflectivity
        self.reflectv = ray.direction.reflectV(self.normalv)
        # refractivity: walk the sorted intersections, tracking which shapes
        # the ray is currently inside, to find the indices on both sides.
        containers = []
        for i in xs:
            if i == intersection:
                if len(containers) == 0:
                    self.n1 = 1
                else:
                    self.n1 = containers[-1].material.refractiveIndex
            if i.shape in containers:
                containers.remove(i.shape)
            else:
                containers.append(i.shape)
            if i == intersection:
                if len(containers) == 0:
                    self.n2 = 1
                else:
                    self.n2 = containers[-1].material.refractiveIndex
                break

    def __str__(self):
        """Human-readable dump of every computed field."""
        # BUG FIX: the original had a stray '+ +' before the "\n" after
        # Normal, which raised TypeError (unary plus on a str).
        return ("T: " + str(self.t) + "\n" + "Object: " + str(self.shape) + "\n" +
                "Point: " + str(self.point) + "\n" + "Eye: " + str(self.eyev) + "\n" +
                "Normal: " + str(self.normalv) + "\n" +
                "Over Point: " + str(self.overPoint) + "\n" +
                "Under Point: " + str(self.underPoint) + "\n" +
                "Reflect Vector: " + str(self.reflectv) + "\n" +
                "n1: " + str(self.n1) + "\n" + "n2: " + str(self.n2))

    def schlick(self):
        """Return the reflectance (Schlick approximation of the Fresnel effect).

        Tested by ../test/ComputationTest.py:test_schlick.
        """
        cos = self.eyev.dot(self.normalv)
        if self.n1 > self.n2:
            n = self.n1 / self.n2
            sin2T = n * n * (1 - cos * cos)
            if sin2T > 1:
                # Total internal reflection.
                return 1
            # BUG FIX: cos_t = sqrt(1 - sin^2(t)); the square root was missing.
            cos = (1 - sin2T) ** 0.5
        r0 = ((self.n1 - self.n2) / (self.n1 + self.n2)) ** 2
        return r0 + (1 - r0) * ((1 - cos) ** 5)
from RenderPy.Sphere import Sphere
from RenderPy.Color import Color
from RenderPy.Matrix import Matrix
from RenderPy.Light import Light
from RenderPy.Tuple import Tuple
from RenderPy.Ray import Ray
from RenderPy.Computation import Computation
from RenderPy.Material import Material
from RenderPy.Intersection import Intersection
# ---------------------
"""
World class contains all the lights and shapes in a scene.
It contains the following 2 elements:
1. lights: an array of Lights
2. shapes: an array of Shapes
World class contains the following functions:
__init__
defaultWorld
intersectWorld
shadeHit
reflectedColor
colorAt
isShadowed
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/WorldTest.py
--- OR ----
python3 -m nose -v ../test/WorldTest.py
--- OR ----
python -m nose -v ../test/WorldTest.py
---------------------------------------------------
"""
class World():
    """A scene: all lights and shapes, plus the shading/recursion pipeline.

    Tested by ../test/WorldTest.py.
    """

    def __init__(self, lights=None, shapes=None):
        """lights: an array of Lights; shapes: an array of Shapes.

        Defaults are created per-instance (a shared mutable default list
        would be aliased between every World built without arguments).
        """
        self.lights = [] if lights is None else lights
        self.shapes = [] if shapes is None else shapes

    @staticmethod
    def defaultWorld():
        """Return the canonical test scene: two concentric spheres and one light.

        Note: when changing a material, assign a new instance rather than
        mutating in place. Tested by ../test/WorldTest.py:test_defaultWorld.
        """
        light = Light(Tuple.point(-10, 10, -10), Color(1, 1, 1))
        s1 = Sphere()
        s1.material = Material(color=Color(0.8, 1.0, 0.6),
                               diffuse=0.7, specular=0.2)
        s2 = Sphere()
        s2.material = Material(color=Color(1, 1, 1))
        s2.transform = Matrix.scaling(0.5, 0.5, 0.5)
        return World([light], [s1, s2])

    def intersectWorld(self, ray: "Ray"):
        """Intersect `ray` with every shape and sort the hits by t.

        Returns (count, intersections). Tested by
        ../test/WorldTest.py:test_intersectWorld.
        """
        total = 0
        result = []
        for s in self.shapes:
            count, intersects = s.intersect(ray)
            if count != 0:
                total += count
                result += intersects
        result.sort(key=lambda x: x.t)
        return total, result

    def shadeHit(self, computation: "Computation", remaining=1):
        """Compute the final shaded color for a pre-computed intersection.

        remaining bounds the reflection/refraction recursion depth.
        Tested by ../test/WorldTest.py:test_shadeHit.
        """
        col = Color(0, 0, 0)
        for l in self.lights:
            inShadow = self.isShadowed(l, computation.overPoint)
            col += computation.shape.material.lighting(
                l, computation.overPoint, computation.eyev, computation.normalv, inShadow, computation.shape.transform)
        reflected = self.reflectedColor(computation, remaining)
        refracted = self.refractedColor(computation, remaining)
        mat = computation.shape.material
        if mat.reflective > 0 and mat.transparency > 0:
            # Both reflective and transparent: blend with the Fresnel factor.
            reflectance = computation.schlick()
            return col + reflected * reflectance + refracted * (1 - reflectance)
        return col + reflected + refracted

    def colorAt(self, ray: "Ray", remaining=1):
        """Return the color the given ray produces in this world.

        Tested by ../test/WorldTest.py:test_colorAt.
        """
        count, xs = self.intersectWorld(ray)
        if count == 0:
            return Color(0, 0, 0)
        h = Intersection.hit(xs)
        # BUG FIX: all intersections may lie behind the ray origin, in which
        # case hit() returns the default Intersection (same sentinel check as
        # isShadowed); building a Computation from it would crash.
        if h == Intersection():
            return Color(0, 0, 0)
        comp = Computation(h, ray, xs)
        return self.shadeHit(comp, remaining)

    def isShadowed(self, l: "Light", point: "Tuple"):
        """Return True when `point` is shadowed from light `l` by some shape.

        Tested by ../test/WorldTest.py:test_isShadowed.
        """
        v = l.position - point
        distance = v.magnitude()
        direction = v.normalize()
        r = Ray(point, direction)
        count, inters = self.intersectWorld(r)
        h = Intersection.hit(inters)
        # Shadowed only if something sits between the point and the light.
        return h != Intersection() and h.t < distance

    def reflectedColor(self, comp: "Computation", remaining=1):
        """Return the color contributed by reflection at this intersection.

        remaining limits the bounce depth. Tested by
        ../test/WorldTest.py:test_refelctedColor.
        """
        if remaining <= 0:
            return Color()
        r = comp.shape.material.reflective
        if r == 0:
            return Color()
        reflectRay = Ray(comp.overPoint, comp.reflectv)
        # BUG FIX: the recursion depth must be decremented here, otherwise two
        # facing mirrors recurse forever (colorAt's default restarts at 1).
        color = self.colorAt(reflectRay, remaining - 1)
        return color * r

    def refractedColor(self, comp: "Computation", remaining=1):
        """Return the color contributed by refraction at this intersection.

        Implements Snell's law, including total internal reflection.
        Tested by ../test/WorldTest.py:test_refractedColor.
        """
        if comp.shape.material.transparency == 0 or remaining == 0:
            return Color()
        nRatio = comp.n1 / comp.n2
        cosI = comp.eyev.dot(comp.normalv)
        sin2T = nRatio * nRatio * (1 - cosI * cosI)
        if sin2T > 1:
            # Total internal reflection: no refracted contribution.
            return Color()
        cosT = (1 - sin2T) ** 0.5
        direction = comp.normalv * (nRatio * cosI - cosT) - comp.eyev * nRatio
        refractRay = Ray(comp.underPoint, direction)
        return self.colorAt(refractRay, remaining - 1) * comp.shape.material.transparency
import numpy as np
# ---------------------
"""
Color class describes colors based on r,g,b from 0 to 1, if there are exceptions it will be handled in the canvas class while output the image
Each color contains 3 elements: r,g,b in a numpy array
r,g,b are all float value
r: red, g: green, b: blue
Color class contains the following functions:
__init__
__str__
__eq__
__add__
__sub__
__mul__
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ColorTest.py
--- OR ----
python3 -m nose -v ../test/ColorTest.py
--- OR ----
python -m nose -v ../test/ColorTest.py
---------------------------------------------------
"""
class Color():
# ---------------------
"""
Color class takes in a group of three numbers or a numpy array
arr[0] is r, arr[1] is g, arr[2] is b
"""
# ---------------------
def __init__(self, r: float = None, g: float = None, b: float = None, arr: np.array = None):
if r == g == b == None:
try:
if arr.size != 0:
self.arr = arr
self.r = arr[0]
self.g = arr[1]
self.b = arr[2]
except:
self.r = 0
self.g = 0
self.b = 0
self.arr = np.array([0, 0, 0])
else:
self.r = r
self.g = g
self.b = b
self.arr = np.array([r, g, b])
# ---------------------
"""
Define the output format for Color class
"""
# ---------------------
def __str__(self):
return "({0},{1},{2})".format(self.r, self.g, self.b)
# ---------------------
"""
Define equivalence of two Color instances
    This is based on numpy allclose function with absolute tolerance 0.0001
"""
# ---------------------
def __eq__(self, color2: "Color"):
return np.allclose(self.arr, color2.arr, atol=0.0001)
# ---------------------
"""
Define the sum between two Colors
---- Inputs: --------
* color2: A Color
---- Outputs: --------
* Color: the sum of two Colors
"""
# ---------------------
def __add__(self, color2: "Color"):
return Color(arr=self.arr + color2.arr)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ColorTest.py:test_add
--- OR ----
python3 -m nose -v ../test/ColorTest.py:test_add
--- OR ----
python -m nose -v ../test/ColorTest.py:test_add
---------------------------------------------------
"""
# ---------------------
"""
Define the difference between two Colors
---- Inputs: --------
* color2: A Color
---- Outputs: --------
* Color: the difference of two Colors
"""
# ---------------------
def __sub__(self, color2: "Color"):
return Color(arr=self.arr - color2.arr)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ColorTest.py:test_subtract
--- OR ----
python3 -m nose -v ../test/ColorTest.py:test_subtract
--- OR ----
python -m nose -v ../test/ColorTest.py:test_subtract
---------------------------------------------------
"""
# ---------------------
"""
Define the product of a Color and a scalar or another Color
Multiplying scalar is to create a new color
Multiplying two colors together is to blend these colors
The order is not interchangeable for color * scalar
But interchangeable for color * color
---- Inputs: --------
* multi: A scalar or a Color
---- Outputs: --------
* Tuple: the product of a Color and a scalar or another Color
"""
# ---------------------
def __mul__(self, multi):
if type(multi) == float or type(multi) == int:
return Color(arr=self.arr * multi)
return Color(arr=self.arr*multi.arr)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ColorTest.py:test_multi
--- OR ----
python3 -m nose -v ../test/ColorTest.py:test_multi
--- OR ----
python -m nose -v ../test/ColorTest.py:test_multi
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Color.py | Color.py |
import numpy as np
from RenderPy.Matrix import Matrix
from RenderPy.Tuple import Tuple
from RenderPy.Ray import Ray
from RenderPy.Canvas import Canvas
from RenderPy.World import World
# ---------------------
"""
Camera class helps to set up a camera in the scene and this would define the angle and distance we will be looking at the scene.
Each camera contains 7 elements:
hsize: a float, define the horizontal size of the canvas in pixels
vsize: a float, define the vertical size of the canvas in pixels
fieldOfView: a float, the radian angle describes how much the camera could see
transform: a Matrix, describe the transform matrix of the camera
halfWidth: a float, define the view on half of the width
halfHeight: a float, define the view on half of the height
pixelSize: a float define the size of a pixel in the scene
Camera class contains the following functions:
__init__
__eq__
rayForPixel
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CameraTest.py
--- OR ----
python3 -m nose -v ../test/CameraTest.py
--- OR ----
python -m nose -v ../test/CameraTest.py
---------------------------------------------------
"""
class Camera():
# ---------------------
"""
Camera class takes in three numbers
hsize is horizontal size, vsize is vertical size, fieldOfView is the field of view of the camera
"""
# ---------------------
def __init__(self, hsize: float, vsize: float, fieldOfView: float):
self.hsize = hsize
self.vsize = vsize
self.fieldOfView = fieldOfView
self.transform = Matrix(matrix=np.eye(4))
# In this part we calculate pixelSize
halfView = np.tan(fieldOfView/2)
# determine whether it is a horizontal or vertical view
aspect = hsize/vsize
if aspect >= 1:
self.halfWidth = halfView
self.halfHeight = halfView/aspect
else:
self.halfWidth = halfView * aspect
self.halfHeight = halfView
self.pixelSize = self.halfWidth * 2/self.hsize
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CameraTest.py:test_init
--- OR ----
python3 -m nose -v ../test/CameraTest.py:test_init
--- OR ----
python -m nose -v ../test/CameraTest.py:test_init
---------------------------------------------------
"""
# ---------------------
"""
Define equivalence of two Canvas instances
"""
# ---------------------
def __eq__(self, camera2: "Camera"):
return self.fieldOfView == camera2.fieldOfView and self.transform == camera2.transform and self.hsize == camera2.hsize and self.vsize == camera2.vsize
# ---------------------
"""
rayForPixel takes in the x and y coordinate and get the ray on that point
---- Inputs: --------
* px: a float, x coordinate
* py: a float, y coordinate
---- Outputs: --------
* ray: a ray on that pixel point
"""
# ---------------------
def rayForPixel(self, px: float, py: float):
xOffset = (px+0.5) * self.pixelSize
yOffset = (py+0.5) * self.pixelSize
wx = self.halfWidth - xOffset
wy = self.halfHeight - yOffset
pixel = ~self.transform * Tuple.point(wx, wy, -1)
origin = ~self.transform * Tuple.point(0, 0, 0)
direction = (pixel-origin).normalize()
return Ray(origin, direction)
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CameraTest.py:test_rayForPixel
--- OR ----
python3 -m nose -v ../test/CameraTest.py:test_rayForPixel
--- OR ----
python -m nose -v ../test/CameraTest.py:test_rayForPixel
---------------------------------------------------
"""
# ---------------------
"""
render generates the image
---- Inputs: --------
* world: a World containing all shapes and lights
---- Outputs: --------
* image: a Canvas containing the calculated values
"""
# ---------------------
def render(self, world: "World"):
image = Canvas(self.hsize, self.vsize)
for y in range(self.vsize):
for x in range(self.hsize):
ray = self.rayForPixel(x, y)
color = world.colorAt(ray)
image.writePixel(x, y, color)
return image
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CameraTest.py:test_render
--- OR ----
python3 -m nose -v ../test/CameraTest.py:test_render
--- OR ----
python -m nose -v ../test/CameraTest.py:test_render
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Camera.py | Camera.py |
import numpy as np
# ---------------------
"""
Tuple class describes the tuple we use to build a 3D renderer
This class is a base class for points and vectors
Each tuple contain 4 elements: x,y,z,w in a numpy array
x,y,z,w are all float value
x: x-coordinate, y: y-coordinate, z: z-coordinate
w = 0 indicate a vector, w = 1 indicate a point
Tuple class contains the following functions:
__init__
__str__
__eq__
__add__
__sub__
__mul__
__truediv__
__invert__
point
vector
magnitude
dot
cross
reflectV
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py
--- OR ----
python3 -m nose -v ../test/TupleTest.py
--- OR ----
python -m nose -v ../test/TupleTest.py
---------------------------------------------------
"""
class Tuple():
# ---------------------
"""
Tuple class takes in a group of 4 numbers or a numpy array
arr[0] is x, arr[1] is y, arr[2] is z, arr[3] is w
We support two ways of input:
Give four double: x,y,z,w
Or
Given a numpy array
"""
# ---------------------
def __init__(self, x: float = None, y: float = None, z: float = None, w: float = None, arr: np.array = None):
if x == y == z == w == None:
try:
if arr.size != 0:
self.x = arr[0]
self.y = arr[1]
self.z = arr[2]
self.w = arr[3]
self.arr = arr
except:
self.x = 0
self.y = 0
self.z = 0
self.w = 0
self.arr = np.array([0, 0, 0, 0])
else:
self.x = x
self.y = y
self.z = z
self.w = w
self.arr = np.array([x, y, z, w])
# ---------------------
"""
Define the output format for Tuple class
"""
# ---------------------
def __str__(self):
return "({0},{1},{2},{3})".format(self.x, self.y, self.z, self.w)
# ---------------------
"""
Define equivalence of two Tuple instances
    This is based on numpy allclose function with absolute tolerance 0.0001
"""
# ---------------------
def __eq__(self, tuple2: "Tuple"):
if tuple2 == None:
return False
return np.allclose(self.arr, tuple2.arr, atol=0.0001)
# ---------------------
"""
Define the sum between two Tuples
Works for both points and vector
---- Inputs: --------
* tuple2: A Tuple
---- Outputs: --------
* Tuple: the sum of two tuples
"""
# ---------------------
def __add__(self, tuple2: "Tuple"):
return Tuple(arr=(self.arr + tuple2.arr))
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_add
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_add
--- OR ----
python -m nose -v ../test/TupleTest.py:test_add
---------------------------------------------------
"""
# ---------------------
"""
Define the difference between two Tuples
Works for both points and vector
---- Inputs: --------
* tuple2: A Tuple
---- Outputs: --------
* Tuple: the difference of two tuples
"""
# ---------------------
def __sub__(self, tuple2: "Tuple"):
return Tuple(arr=(self.arr - tuple2.arr))
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_subtract
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_subtract
--- OR ----
python -m nose -v ../test/TupleTest.py:test_subtract
---------------------------------------------------
"""
# ---------------------
"""
Define the product of a Tuple and a scalar
This is used for finding the point lies scalar times further in the direction of the given vector
The order is not interchangeable, must be tuple * scalar
Works vector only
---- Inputs: --------
* scalar: A scalar
---- Outputs: --------
* Tuple: the product of a Tuple and a scalar
"""
# ---------------------
def __mul__(self, scalar: float):
return Tuple(arr=self.arr * scalar)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_multiScalar
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_multiScalar
--- OR ----
python -m nose -v ../test/TupleTest.py:test_multiScalar
---------------------------------------------------
"""
# ---------------------
"""
Define the division of a Tuple and a scalar
The order is not interchangeable, must be tuple / scalar
Works for vector only
---- Inputs: --------
* scalar: A scalar
---- Outputs: --------
* Tuple: the product of a Tuple and a scalar
"""
# ---------------------
def __truediv__(self, scalar: float):
return Tuple(arr=self.arr / scalar)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_divScalar
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_divScalar
--- OR ----
python -m nose -v ../test/TupleTest.py:test_divScalar
---------------------------------------------------
"""
# ---------------------
"""
Negate multiply each element in the array by -1
Works for both point and vector
---- Outputs: --------
* Tuple: the negated tuple
"""
# ---------------------
def __invert__(self):
return Tuple(arr=-self.arr)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_negate
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_negate
--- OR ----
python -m nose -v ../test/TupleTest.py:test_negate
---------------------------------------------------
"""
# ---------------------
"""
Point is a Tuple having w=1
point is a static method
---- Inputs: --------
* x: x-coordinate
* y: y-coordinate
* z: z-coordinate
---- Outputs: --------
* Tuple: a Point
"""
# ---------------------
@staticmethod
def point(x: float, y: float, z: float):
return Tuple(x, y, z, 1)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_point
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_point
--- OR ----
python -m nose -v ../test/TupleTest.py:test_point
---------------------------------------------------
"""
# ---------------------
"""
Vector is a Tuple having w=0
vector is a static method
---- Inputs: --------
* x: x-coordinate
* y: y-coordinate
* z: z-coordinate
---- Outputs: --------
    * Tuple: a Vector
"""
# ---------------------
@staticmethod
def vector(x: float, y: float, z: float):
return Tuple(x, y, z, 0)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_vector
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_vector
--- OR ----
python -m nose -v ../test/TupleTest.py:test_vector
---------------------------------------------------
"""
# ---------------------
"""
    Magnitude is used for discovering the distance represented by a vector
Magnitude is calculated based on Pythagoras' theorem
Works for vector only
---- Outputs: --------
* magnitude: a scalar
"""
# ---------------------
def magnitude(self):
return np.sqrt(sum(self.arr**2))
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_magnitude
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_magnitude
--- OR ----
python -m nose -v ../test/TupleTest.py:test_magnitude
---------------------------------------------------
"""
# ---------------------
"""
Normalize is used to converting a vector to a unit vector to make sure the rays are calculated standardly
Works for vector only
---- Outputs: --------
* normalizedVector: a Tuple
"""
# ---------------------
def normalize(self):
if self.magnitude() == 0:
return self
return self/self.magnitude()
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_magnitude
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_magnitude
--- OR ----
python -m nose -v ../test/TupleTest.py:test_magnitude
---------------------------------------------------
"""
# ---------------------
"""
Dot product is a standard way to understand the angle between two vectors, the smaller the result, the larger the angle
It is widely used to find intersactions of rays and objects.
Works for vector only
---- Inputs: --------
* tuple2: a Tuple
---- Outputs: --------
* dotProduct: a scalar
"""
# ---------------------
def dot(self, tuple2: "Tuple"):
return float(self.arr@tuple2.arr)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_dot
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_dot
--- OR ----
python -m nose -v ../test/TupleTest.py:test_dot
---------------------------------------------------
"""
# ---------------------
"""
Cross product is a standard way to find a third vector perpendicular to the existing two vectors.
However, given the vector has directions, and can be pointing the opposite direction
If we have the cross product order changed then we would have the result vector pointing to the opposite direction
Works for vector only
---- Inputs: --------
* tuple2: a Tuple
---- Outputs: --------
* crossProduct: a Tuple perpendicular to the given two vectors
"""
# ---------------------
def cross(self, tuple2: "Tuple"):
crossP = np.cross(self.arr[:-1], tuple2.arr[:-1])
return Tuple.vector(crossP[0], crossP[1], crossP[2])
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_cross
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_cross
--- OR ----
python -m nose -v ../test/TupleTest.py:test_cross
---------------------------------------------------
"""
# ---------------------
"""
reflectV is used to calculate the reflected vector based on the original input
and the calculated normal vector
Works for vector only
---- Inputs: --------
* normal: a Tuple, the normal vector
---- Outputs: --------
    * reflected: a Tuple, the reflection of this vector about the normal
"""
# ---------------------
def reflectV(self, normal: "Tuple"):
return self - normal * 2 * self.dot(normal)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TupleTest.py:test_reflectV
--- OR ----
python3 -m nose -v ../test/TupleTest.py:test_reflectV
--- OR ----
python -m nose -v ../test/TupleTest.py:test_reflectV
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Tuple.py | Tuple.py |
from RenderPy.Tuple import Tuple
from RenderPy.Matrix import Matrix
# ---------------------
"""
Ray class helps to describe the ray in the picture
Each ray contains 2 elements: origin and direction
origin, direction are both Tuple value
origin: the start point of the ray, direction: the moving direction of the ray
Ray class contains the following functions:
__init__
__str__
__eq__
position
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/RayTest.py
--- OR ----
python3 -m nose -v ../test/RayTest.py
--- OR ----
python -m nose -v ../test/RayTest.py
---------------------------------------------------
"""
class Ray():
# ---------------------
"""
Ray class takes in two Tuples
origin is the origin of the ray, a point
direction is the direction of the ray, a vector
"""
# ---------------------
def __init__(self, origin: "Tuple" = Tuple(), direction: "Tuple" = Tuple()):
self.origin = origin
self.direction = direction
# ---------------------
"""
Define the output format for Ray class
"""
# ---------------------
def __str__(self):
return "origin: " + str(self.origin) + " direction: " + str(self.direction)
# ---------------------
"""
Define equivalence of two Ray instances
"""
# ---------------------
def __eq__(self, ray2: "Ray"):
return self.direction == ray2.direction and self.origin == ray2.origin
# ---------------------
"""
Define the final point based on the given direction and origin of the ray
---- Inputs: --------
* lambda: a float
---- Outputs: --------
* Tuple: the point on the line that is t units away from origin
"""
# ---------------------
def position(self, t: float):
return self.origin + self.direction * t
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/RayTest.py:test_position
--- OR ----
python3 -m nose -v ../test/RayTest.py:test_position
--- OR ----
python -m nose -v ../test/RayTest.py:test_position
---------------------------------------------------
"""
# ---------------------
"""
Helps to find the new ray after a transformation
---- Inputs: --------
* matrix: a transformation matrix
---- Outputs: --------
* Ray: the new transformed ray
"""
# ---------------------
def transform(self, matrix: "Matrix"):
return Ray(origin=matrix*self.origin, direction=matrix*self.direction)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/RayTest.py:test_transform
--- OR ----
python3 -m nose -v ../test/RayTest.py:test_transform
--- OR ----
python -m nose -v ../test/RayTest.py:test_transform
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Ray.py | Ray.py |
from RenderPy.Tuple import Tuple
# ---------------------
"""
Intersection class helps to save a intersection point of the shape and the ray
    It contains four elements: t, shape, and the optional u, v
t: a scalar recording the intersection is t unit apart from the ray's origin
shape: the shape used to be intersected with the given ray
Intersection class contains the following functions:
__init__
__eq__
hit
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/IntersectionTest.py
--- OR ----
python3 -m nose -v ../test/IntersectionTest.py
--- OR ----
python -m nose -v ../test/IntersectionTest.py
---------------------------------------------------
"""
class Intersection():
# ---------------------
"""
Intersection class takes in no input
t is the intersection point
shape is the shape used to make calculation
"""
# ---------------------
def __init__(self, t: float = None, shape=None, u=None, v=None):
self.t = t
self.shape = shape
self.u = u
self.v = v
# ---------------------
"""
Define equivalence of two Intersection instances
"""
# ---------------------
def __eq__(self, i2: "Intersection"):
if i2 == None:
return False
return self.t == i2.t and self.shape == i2.shape and self.u == i2.u and self.v == i2.v
# ---------------------
"""
Define the print format of Intersection instances
"""
# ---------------------
def __str__(self):
return "t:" + str(self.t) + "\n" + "shape:" + str(self.shape)+"\n" + "u:" + str(self.u) + "\n" + "v:" + str(self.v) + "\n"
# ---------------------
"""
Find the intersection with the smallest t-value, ignore those negative values
---- Inputs: --------
* xs: a list of intersections
---- Outputs: --------
* results: the intersection with the smallest non-negative t-value or empty Intersection instance
"""
# ---------------------
@staticmethod
def hit(xs):
xs = [i for i in xs if i.t >= 0]
if len(xs) == 0:
return Intersection()
return min(xs, key=lambda x: x.t)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/IntersectionTest.py:test_hit
--- OR ----
python3 -m nose -v ../test/IntersectionTest.py:test_hit
--- OR ----
python -m nose -v ../test/IntersectionTest.py:test_hit
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Intersection.py | Intersection.py |
import re
from RenderPy.Tuple import Tuple
from RenderPy.Group import Group
from RenderPy.Triangle import Triangle
# ---------------------
"""
ObjParser class helps to parse .obj files into images and render them with triangles or smooth triangles.
It contains the following variable:
1. vertices: a list, a collection of different vertices.
2. normals: a list, a collection of different normals
3. textures: a list, a collection of different textures
4. defaultGroup: a Group, the shape that contains all triangles
Intersection class contains the following functions:
__init__
__eq__
hit
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ObjParserTest.py
--- OR ----
python3 -m nose -v ../test/ObjParserTest.py
--- OR ----
python -m nose -v ../test/ObjParserTest.py
---------------------------------------------------
"""
class ObjParser():
# ---------------------
"""
ObjParser class takes in no input
"""
# ---------------------
def __init__(self):
self.vertices = []
self.normals = []
self.textures = []
self.defaultGroup = Group()
self.cur = self.defaultGroup
self.subGroups = {}
# ---------------------
"""
Define equivalence of two ObjParser instances
"""
# ---------------------
def __eq__(self, p2: "ObjParser"):
if len(self.vertices) != len(p2.vertices):
return False
for i in range(len(self.vertices)):
if self.vertices[i] != p2.vertices[i]:
return False
if len(self.normals) != len(p2.normals):
return False
for i in range(len(self.normals)):
if self.normals[i] != p2.normals[i]:
return False
if len(self.textures) != len(p2.textures):
return False
for i in range(len(self.textures)):
if self.textures[i] != p2.textures[i]:
return False
return self.defaultGroup == p2.defaultGroup
# ---------------------
"""
parse reads in the obj file and holds everything in a group.
---- Inputs: --------
* path: a string, the path of the obj file
---- Outputs: --------
no output, all conversion will happen within the instance
"""
# ---------------------
def parse(self, path: str):
f = open(path, "r")
lines = f.readlines()
for l in lines:
self.convertLine(l)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ObjParserTest.py:test_parse
--- OR ----
python3 -m nose -v ../test/ObjParserTest.py:test_parse
--- OR ----
python -m nose -v ../test/ObjParserTest.py:test_parse
---------------------------------------------------
"""
# ---------------------
"""
convertLine tests whether a line is qualified and converts it into the formats we want.
---- Inputs: --------
* line: a string
---- Outputs: --------
no output, all conversion will happen within the instance
"""
# ---------------------
    def convertLine(self, line: str):
        """Parse one line of an .obj file and update the parser state.

        Recognized statements: v / vn / vt (vertex data), f (faces, in
        three index formats) and g (named groups). Unmatched lines are
        silently ignored.
        """
        # Compiled regexes classify the statement before any splitting.
        # v / vn / vt followed by three signed decimal numbers
        # (vertex, vertex normal and vertex texture respectively).
        # NOTE(review): the number pattern accepts neither ".5" nor
        # scientific notation like "1e-3" -- confirm the exporters in use
        # never emit those forms.
        rv = re.compile("(v|vn|vt) ([-+]?[0-9]+[\.]?[0-9]* ?){3}")
        # f 1 2 3  -- plain vertex-index faces (three or more indices)
        rf1 = re.compile("f (\d+ ?){3,}")
        # The next two face formats carry normals, used for smooth triangles.
        # f v1//vn1 ...   e.g.  f 1//2 1//3 1//4
        rf2 = re.compile("f (\d+\/\/\d+ ?){3,}")
        # f v1/vt1/vn1 ...   e.g.  f 1/2/3 1/3/4 1/4/5
        rf3 = re.compile("f (\d+\/\d+\/\d+ ?){3,}")
        # g name -- start a named subgroup
        rg = re.compile("g \w+")
        if rv.match(line):
            line = line.split(" ")
            if line[0] == "v":
                self.vertices.append(Tuple.point(
                    float(line[1]), float(line[2]), float(line[3])))
            elif line[0] == "vn":
                self.normals.append(Tuple.vector(
                    float(line[1]), float(line[2]), float(line[3])))
            elif line[0] == "vt":
                self.textures.append(Tuple.vector(
                    float(line[1]), float(line[2]), float(line[3])))
        elif rf1.match(line):
            # Fan-triangulate the (possibly >3-vertex) face around its
            # first vertex; .obj indices are 1-based, hence the -1.
            line = line.split(" ")
            head = int(line[1])-1
            line = line[1:]
            for i in range(1, len(line)-1):
                t = Triangle(self.vertices[head],
                             self.vertices[int(line[i])-1],
                             self.vertices[int(line[i+1])-1])
                self.cur.addChild(t)
        elif rg.match(line):
            line = line.split(" ")
            # [:-1] drops the trailing newline from the group name.
            groupName = line[1][:-1]
            self.subGroups[groupName] = Group()
            self.defaultGroup.addChild(self.subGroups[groupName])
            # Faces parsed after this line land in the new subgroup.
            self.cur = self.subGroups[groupName]
        # v//vn faces: vertex and normal indices, no texture coordinate.
        elif rf2.match(line):
            line = line.split(" ")
            line = line[1:]
            vert = []
            norm = []
            for l in line:
                l = l.split("//")
                vert.append(int(l[0])-1)
                norm.append(int(l[1])-1)
            # Fan-triangulate, carrying per-vertex normals for smoothing.
            for i in range(1, len(vert)-1):
                t = Triangle(self.vertices[int(vert[0])],
                             self.vertices[int(vert[i])],
                             self.vertices[int(vert[i+1])],
                             self.normals[int(norm[0])],
                             self.normals[int(norm[i])],
                             self.normals[int(norm[i+1])])
                self.cur.addChild(t)
        # v/vt/vn faces: the texture index (l[1]) is skipped -- only the
        # vertex and normal indices are kept.
        elif rf3.match(line):
            line = line.split(" ")
            line = line[1:]
            vert = []
            norm = []
            for l in line:
                l = l.split("/")
                vert.append(int(l[0])-1)
                norm.append(int(l[2])-1)
            for i in range(1, len(vert)-1):
                t = Triangle(self.vertices[int(vert[0])],
                             self.vertices[int(vert[i])],
                             self.vertices[int(vert[i+1])],
                             self.normals[int(norm[0])],
                             self.normals[int(norm[i])],
                             self.normals[int(norm[i+1])])
                self.cur.addChild(t)
# -----------------
"""
Make sure you are on ~/src
this is tested within the parser function
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/ObjParser.py | ObjParser.py |
import numpy as np
from RenderPy.Color import Color
from RenderPy.Matrix import Matrix
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Pattern class helps to establish the patterns that would show up on the shape
It contains a transform matrix and two colors to match up the colors, but could be expanded later.
1. transform: a Matrix, a transform matrix.
2. c1: a Color, one color in the pattern
3. c2: a Color, one color in the pattern
4. patternType: a String, indicates the type of pattern used in the material
Currently we only support 4 types of patterns:
stripe, gradient, ring and checker, we will support more in the future
Pattern class contains the following functions:
__init__
__eq__
__str__
patternAtObject
stripe
gradient
ring
checker
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/PatternTest.py
--- OR ----
python3 -m nose -v ../test/PatternTest.py
--- OR ----
python -m nose -v ../test/PatternTest.py
---------------------------------------------------
"""
class Pattern():
    # ---------------------
    """
    Pattern class takes in two colors, an optional transform matrix and a string indicating the pattern type
    """
    # ---------------------

    def __init__(self, c1: "Color", c2: "Color", transform: "Matrix" = None, patternType: str = "stripe"):
        self.c1 = c1
        self.c2 = c2
        # default to the identity matrix so an untransformed pattern lives in object space
        if transform is not None:
            self.transform = transform
        else:
            self.transform = Matrix(matrix=np.eye(4))
        self.patternType = patternType
    # ---------------------
    """
    Define equivalence of two Pattern instances
    """
    # ---------------------

    def __eq__(self, pattern2: "Pattern"):
        # guard against None / other types: Python's reflected == (e.g. from
        # Material.__eq__ when one material has no pattern) calls this with
        # a non-Pattern argument, which used to raise AttributeError
        if type(pattern2).__name__ != "Pattern":
            return False
        # patternType is part of the pattern's identity: same colors/transform
        # with a different type renders differently
        return pattern2.c1 == self.c1 and pattern2.c2 == self.c2 and pattern2.transform == self.transform and pattern2.patternType == self.patternType
    # ---------------------
    """
    Define the string of pattern
    """
    # ---------------------

    def __str__(self):
        return "Color1: " + str(self.c1) + "\nColor2: " + str(self.c2) + "\nTransform Matrix: \n" + str(self.transform) + "\nPattern Type: " + self.patternType
    # ---------------------
    """
    Compute the pattern color at a world-space point on an object.
    The point is first brought into object space, then into pattern space,
    and dispatched to the concrete pattern function.
    ---- Inputs: --------
        * point: a Tuple, a point in world space
        * objTransform: a Matrix, the transform matrix of the object
    ---- Outputs: --------
        * color: a Color, the color at the point
    """
    # ---------------------

    def patternAtObject(self, point: "Tuple", objTransform: "Matrix"):
        objPoint = ~objTransform * point
        patternPoint = ~self.transform*objPoint
        if self.patternType == "stripe":
            return self.stripe(patternPoint)
        elif self.patternType == "gradient":
            return self.gradient(patternPoint)
        elif self.patternType == "ring":
            return self.ring(patternPoint)
        elif self.patternType == "checker":
            return self.checker(patternPoint)
        else:
            # unknown pattern type: fall back to a debug color built from the point
            return Color(patternPoint.x, patternPoint.y, patternPoint.z)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PatternTest.py:test_patternAtObject
    --- OR ----
    python3 -m nose -v ../test/PatternTest.py:test_patternAtObject
    --- OR ----
    python -m nose -v ../test/PatternTest.py:test_patternAtObject
    ---------------------------------------------------
    """

    # ---------------------
    """
    Stripe pattern looks like ababab across the main axis
    ---- Inputs: --------
        * point: a Point
    ---- Outputs: --------
        * color: a Color, the color at the point
    """
    # ---------------------

    def stripe(self, point: "Tuple"):
        return self.c1 if np.floor(point.x) % 2 == 0 else self.c2
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PatternTest.py:test_stripe
    --- OR ----
    python3 -m nose -v ../test/PatternTest.py:test_stripe
    --- OR ----
    python -m nose -v ../test/PatternTest.py:test_stripe
    ---------------------------------------------------
    """

    # ---------------------
    """
    Gradient pattern looks like a->b incrementally across the main axis
    ---- Inputs: --------
        * point: a Point
    ---- Outputs: --------
        * color: a Color, the color at the point
    """
    # ---------------------

    def gradient(self, point: "Tuple"):
        # linear interpolation between c1 and c2 over the fractional part of x
        distance = self.c2-self.c1
        fraction = float(point.x - np.floor(point.x))
        return self.c1 + distance * fraction
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PatternTest.py:test_gradient
    --- OR ----
    python3 -m nose -v ../test/PatternTest.py:test_gradient
    --- OR ----
    python -m nose -v ../test/PatternTest.py:test_gradient
    ---------------------------------------------------
    """

    # ---------------------
    """
    Ring pattern looks like ababab along x-axis and z-axis and in a circle
    ---- Inputs: --------
        * point: a Point
    ---- Outputs: --------
        * color: a Color, the color at the point
    """
    # ---------------------

    def ring(self, point: "Tuple"):
        if np.floor((point.x**2 + point.z**2)) % 2 == 0:
            return self.c1
        return self.c2
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PatternTest.py:test_ring
    --- OR ----
    python3 -m nose -v ../test/PatternTest.py:test_ring
    --- OR ----
    python -m nose -v ../test/PatternTest.py:test_ring
    ---------------------------------------------------
    """

    # ---------------------
    """
    3D Checker pattern looks like ababab along x-axis and y-axis and in a square but does not influence z-axis
    ---- Inputs: --------
        * point: a Point
    ---- Outputs: --------
        * color: a Color, the color at the point
    """
    # ---------------------

    def checker(self, point: "Tuple"):
        if (np.floor(np.array([point.x, point.y, point.z])).sum()) % 2 == 0:
            return self.c1
        return self.c2
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PatternTest.py:test_checker
    --- OR ----
    python3 -m nose -v ../test/PatternTest.py:test_checker
    --- OR ----
    python -m nose -v ../test/PatternTest.py:test_checker
    ---------------------------------------------------
    """
import numpy as np
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Group class helps to describe a collection of multiple shapes and they would be transformed together
It inherits all elements from shape
The group's normal is calculated within each sub shapes.
It has an array of shapes
Group class contains the following functions:
        __init__
        __eq__
        addChild
        localIntersect
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/GroupTest.py
--- OR ----
python3 -m nose -v ../test/GroupTest.py
--- OR ----
python -m nose -v ../test/GroupTest.py
---------------------------------------------------
"""
class Group(Shape):
    # ---------------------
    """
    Group class takes in an optional list of shapes
    """
    # ---------------------

    def __init__(self, objs=None):
        super().__init__()
        if objs is None:
            self.objects = []
        else:
            # adopt each supplied shape so transforms can be resolved
            # by walking up the parent chain
            for i in objs:
                i.parent = self
            self.objects = objs
    # ---------------------
    """
    Define equivalence of two Group instances
    """
    # ---------------------

    def __eq__(self, group2: "Group"):
        if type(group2).__name__ != "Group":
            return False
        if len(self.objects) != len(group2.objects):
            return False
        for i in range(len(self.objects)):
            if self.objects[i] != group2.objects[i]:
                return False
        return True
    # ---------------------
    """
    addChild adds a shape to the object array of the group
    ---- Inputs: --------
        * shape: a Shape, that is any form like sphere, cube, cylinder or cone
    """
    # ---------------------

    def addChild(self, shape):
        shape.parent = self
        self.objects.append(shape)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/GroupTest.py:test_addChild
    --- OR ----
    python3 -m nose -v ../test/GroupTest.py:test_addChild
    --- OR ----
    python -m nose -v ../test/GroupTest.py:test_addChild
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the intersection between the ray and every shape in the group
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections sorted by distance t
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        xs = []
        c = 0
        for i in self.objects:
            count, result = i.intersect(ray)
            c += count
            xs += result
        # present the combined hits in ray order
        xs = sorted(xs, key=lambda x: x.t)
        return c, xs
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/GroupTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/GroupTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/GroupTest.py:test_intersect
    ---------------------------------------------------
    """
import numpy as np
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Triangle class helps to describe a triangle in the world,
it is established based on the input point values.
It could be used to form a large shape by different combinations.
It is useful for reading obj files
It inherits all elements from shape
Here, we combine the definition of smooth triangle and triangle together.
If you input n1,n2 and n3 for a triangle, it would be a smooth triangle.
    Triangle class contains the following functions:
__init__
localIntersect
localNormalAt
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/TriangleTest.py
--- OR ----
python3 -m nose -v ../test/TriangleTest.py
--- OR ----
python -m nose -v ../test/TriangleTest.py
---------------------------------------------------
"""
class Triangle(Shape):
    # ---------------------
    """
    Triangle class takes in three points to describe the three corners of the triangle.
    Optional normals n1, n2, n3 make it a smooth triangle (normals interpolated per hit).
    """
    # ---------------------

    def __init__(self, p1: "Tuple", p2: "Tuple", p3: "Tuple", n1: "Tuple" = None, n2: "Tuple" = None, n3: "Tuple" = None):
        super().__init__()
        self.p1 = p1
        self.p2 = p2
        self.p3 = p3
        # edge vectors from p1, reused by the intersection routine
        self.e1 = p2-p1
        self.e2 = p3-p1
        self.n1 = n1
        self.n2 = n2
        self.n3 = n3
        # flat-shading normal of the whole triangle
        self.normal = (self.e2.cross(self.e1)).normalize()
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/TriangleTest.py:test_init
    --- OR ----
    python3 -m nose -v ../test/TriangleTest.py:test_init
    --- OR ----
    python -m nose -v ../test/TriangleTest.py:test_init
    ---------------------------------------------------
    """

    # ---------------------
    """
    Define equivalence of two Triangle instances
    """
    # ---------------------

    def __eq__(self, t2: "Triangle"):
        if type(t2).__name__ != "Triangle":
            return False
        return self.material == t2.material and self.transform == t2.transform and t2.p1 == self.p1 and t2.p2 == self.p2 and t2.p3 == self.p3
    # ---------------------
    """
    Define print format for triangle instances
    """
    # ---------------------

    def __str__(self):
        result = "p1:"+str(self.p1)+"\n" + "p2:" + \
            str(self.p2)+"\n"+"p3:"+str(self.p3)+"\n"
        if self.n1 is not None:
            result += "n1:"+str(self.n1) + "\n" + "n2: " + \
                str(self.n2) + "\n" + "n3: "+str(self.n3) + "\n"
        return result
    # ---------------------
    """
    Find the intersection between the ray and the triangle
    (Moller-Trumbore algorithm)
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections are listed
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        de2 = ray.direction.cross(self.e2)
        det = self.e1.dot(de2)
        # ray parallel to the triangle plane -> miss
        if abs(det) < 0.00001:
            return 0, []
        f = 1/det
        op1 = ray.origin-self.p1
        u = op1.dot(de2) * f
        if u < 0 or u > 1:
            return 0, []
        oe1 = op1.cross(self.e1)
        v = ray.direction.dot(oe1)*f
        if v < 0 or (u+v) > 1:
            return 0, []
        # BUG FIX: t must be scaled by f = 1/det, just like u and v,
        # otherwise the hit distance is wrong whenever det != 1
        t = f * self.e2.dot(oe1)
        if self.n1 is not None:
            # smooth triangle: remember barycentric u/v for normal interpolation
            return 1, [Intersection(t, self, u, v), ]
        return 1, [Intersection(t, self), ]
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/TriangleTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/TriangleTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/TriangleTest.py:test_intersect
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the normal at a certain point of the triangle
    ---- Inputs: --------
        * point: a Tuple, indicating a point on the triangle
        * hit (kwarg): an Intersection carrying barycentric u/v for smooth triangles
    ---- Outputs: --------
        * vector: the normal vector
    """
    # ---------------------

    def localNormalAt(self, point: "Tuple", **kwargs):
        if "hit" not in kwargs:
            return self.normal
        # smooth triangle: interpolate the vertex normals with barycentric weights
        hit = kwargs["hit"]
        return self.n2 * hit.u + self.n3 * hit.v + self.n1 * (1-hit.u-hit.v)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/TriangleTest.py:test_normalAt
    --- OR ----
    python3 -m nose -v ../test/TriangleTest.py:test_normalAt
    --- OR ----
    python -m nose -v ../test/TriangleTest.py:test_normalAt
    ---------------------------------------------------
    """
import numpy as np
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Cube class helps to describe a cube with a center at point(0,0,0)
It inherits all elements from shape
Cube class contains the following functions:
__init__
localIntersect
localNormalAt
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CubeTest.py
--- OR ----
python3 -m nose -v ../test/CubeTest.py
--- OR ----
python -m nose -v ../test/CubeTest.py
---------------------------------------------------
"""
class Cube(Shape):
    # ---------------------
    """
    Cube class takes in no input
    """
    # ---------------------

    def __init__(self):
        super().__init__()
    # ---------------------
    """
    Define equivalence of two Cube instances
    """
    # ---------------------

    def __eq__(self, cube2: "Cube"):
        if type(cube2).__name__ != "Cube":
            return False
        return self.material == cube2.material and self.transform == cube2.transform
    # ---------------------
    """
    Find the intersection between the ray and the cube
    (slab method: intersect each pair of parallel planes, keep the overlap)
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections are listed
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        def checkCaps(t):
            x = ray.origin.x + t*ray.direction.x
            z = ray.origin.z + t*ray.direction.z
            return (x*x + z*z) <= 1

        def checkAxis(origin, direction):
            # entry/exit distances for the pair of planes at -1 and +1
            tminNumerator = (-1-origin)
            tmaxNumerator = (1-origin)
            if abs(direction) >= 0.00001:
                tmin = tminNumerator/direction
                tmax = tmaxNumerator/direction
            else:
                # ray parallel to this axis: send the bounds to +/- infinity
                # (np.Infinity was removed in NumPy 2.0; float("inf") is identical)
                tmin = tminNumerator * float("inf")
                tmax = tmaxNumerator * float("inf")
            if tmin > tmax:
                tmin, tmax = tmax, tmin
            return tmin, tmax
        xtmin, xtmax = checkAxis(ray.origin.x, ray.direction.x)
        ytmin, ytmax = checkAxis(ray.origin.y, ray.direction.y)
        ztmin, ztmax = checkAxis(ray.origin.z, ray.direction.z)
        tmin = max(xtmin, ytmin, ztmin)
        tmax = min(xtmax, ytmax, ztmax)
        if tmin > tmax:
            return 0, []
        return 2, [Intersection(tmin, self), Intersection(tmax, self)]
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/CubeTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/CubeTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/CubeTest.py:test_intersect
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the normal at a certain point of the Cube
    (the face whose coordinate has the largest magnitude determines the normal)
    ---- Inputs: --------
        * point: a Tuple, indicating a point on the Cube
    ---- Outputs: --------
        * vector: the normal vector
    """
    # ---------------------

    def localNormalAt(self, point: "Tuple"):
        maxc = max(abs(point.x), abs(point.y), abs(point.z))
        if maxc - abs(point.x) <= 0.0001:
            return Tuple.vector(point.x, 0, 0)
        elif maxc - abs(point.y) <= 0.0001:
            return Tuple.vector(0, point.y, 0)
        return Tuple.vector(0, 0, point.z)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/CubeTest.py:test_normalAt
    --- OR ----
    python3 -m nose -v ../test/CubeTest.py:test_normalAt
    --- OR ----
    python -m nose -v ../test/CubeTest.py:test_normalAt
    ---------------------------------------------------
    """
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Plane class helps to describe a plane with a center at point(0,0,0)
It inherits all elements from shape
Plane class contains the following functions:
__init__
localIntersect
localNormalAt
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/PlaneTest.py
--- OR ----
python3 -m nose -v ../test/PlaneTest.py
--- OR ----
python -m nose -v ../test/PlaneTest.py
---------------------------------------------------
"""
class Plane(Shape):
    # ---------------------
    """
    Plane class takes in no input
    (the plane is the xz-plane in object space, y == 0)
    """
    # ---------------------

    def __init__(self):
        super().__init__()
    # ---------------------
    """
    Define equivalence of two Plane instances
    """
    # ---------------------

    def __eq__(self, plane2: "Plane"):
        if type(plane2).__name__ != "Plane":
            return False
        return self.material == plane2.material and self.transform == plane2.transform
    # ---------------------
    """
    Find the intersection between the ray and the plane
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections are listed
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        # a ray (almost) parallel to the plane never hits it
        if abs(ray.direction.y) < 0.00001:
            # return a list for consistency with the other shapes
            return 0, []
        return 1, [Intersection(-ray.origin.y/ray.direction.y, self), ]
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PlaneTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/PlaneTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/PlaneTest.py:test_intersect
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the normal at a certain point of the Plane
    (constant everywhere: straight up along y)
    ---- Inputs: --------
        * point: a Tuple, indicating a point on the Plane (unused, kept for the Shape interface)
    ---- Outputs: --------
        * vector: the normal vector
    """
    # ---------------------

    def localNormalAt(self, point: "Tuple"):
        return Tuple.vector(0, 1, 0)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/PlaneTest.py:test_normalAt
    --- OR ----
    python3 -m nose -v ../test/PlaneTest.py:test_normalAt
    --- OR ----
    python -m nose -v ../test/PlaneTest.py:test_normalAt
    ---------------------------------------------------
    """
import numpy as np
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Cylinder class helps to describe a cylinder with a center at point(0,0,0)
It inherits all elements from shape
Cylinder class contains the following functions:
__init__
localIntersect
localNormalAt
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CylinderTest.py
--- OR ----
python3 -m nose -v ../test/CylinderTest.py
--- OR ----
python -m nose -v ../test/CylinderTest.py
---------------------------------------------------
"""
class Cylinder(Shape):
    # ---------------------
    """
    Cylinder class takes in a minimum and a maximum to describe the height of a cylinder,
    and a closed flag indicating whether the ends are capped
    """
    # ---------------------

    def __init__(self, minimum=float("-inf"), maximum=float("inf"), closed=False):
        super().__init__()
        self.minimum = minimum
        self.maximum = maximum
        self.closed = closed
    # ---------------------
    """
    Define equivalence of two Cylinder instances
    """
    # ---------------------

    def __eq__(self, cylinder2: "Cylinder"):
        if type(cylinder2).__name__ != "Cylinder":
            return False
        # the height bounds and the caps are part of a cylinder's identity too
        return (self.material == cylinder2.material
                and self.transform == cylinder2.transform
                and self.minimum == cylinder2.minimum
                and self.maximum == cylinder2.maximum
                and self.closed == cylinder2.closed)
    # ---------------------
    """
    Find the intersection between the ray and the cylinder
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections are listed
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        def checkCaps(t):
            # is the hit at distance t within the unit radius of a cap?
            x = ray.origin.x + t*ray.direction.x
            z = ray.origin.z + t*ray.direction.z
            return (x*x + z*z) <= 1

        def intersectCap(xs):
            # caps only matter when the cylinder is closed and the ray can cross them
            if not self.closed or abs(ray.direction.y) < 0.00001:
                return len(xs), xs
            t = (self.minimum - ray.origin.y)/ray.direction.y
            if checkCaps(t):
                xs.append(Intersection(t, self))
            t = (self.maximum - ray.origin.y)/ray.direction.y
            if checkCaps(t):
                xs.append(Intersection(t, self))
            return len(xs), xs
        xs = []
        a = ray.direction.x ** 2 + ray.direction.z**2
        # ray is (nearly) parallel to the y axis: only the caps can be hit
        if a < 0.0001:
            return intersectCap(xs)
        b = 2*ray.origin.x*ray.direction.x + 2*ray.origin.z*ray.direction.z
        c = ray.origin.x**2 + ray.origin.z**2 - 1
        disc = b*b-4*a*c
        if disc < 0:
            return 0, []
        t0 = (-b-disc**0.5)/(2*a)
        t1 = (-b+disc**0.5)/(2*a)
        if t0 > t1:
            t0, t1 = t1, t0
        # keep only wall hits that fall between the height bounds
        y0 = ray.origin.y + t0*ray.direction.y
        if self.minimum < y0 < self.maximum:
            xs.append(Intersection(t0, self))
        y1 = ray.origin.y + t1*ray.direction.y
        if self.minimum < y1 < self.maximum:
            xs.append(Intersection(t1, self))
        return intersectCap(xs)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/CylinderTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/CylinderTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/CylinderTest.py:test_intersect
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the normal at a certain point of the cylinder
    ---- Inputs: --------
        * point: a Tuple, indicating a point on the cylinder
    ---- Outputs: --------
        * vector: the normal vector
    """
    # ---------------------

    def localNormalAt(self, point: "Tuple"):
        dist = point.x * point.x + point.z * point.z
        # points on the caps (within unit radius, at the top/bottom bound)
        if dist < 1 and point.y >= self.maximum-0.00001:
            return Tuple.vector(0, 1, 0)
        elif dist < 1 and point.y <= self.minimum + 0.00001:
            return Tuple.vector(0, -1, 0)
        # points on the wall: normal points straight out from the y axis
        return Tuple.vector(point.x, 0, point.z)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/CylinderTest.py:test_normalAt
    --- OR ----
    python3 -m nose -v ../test/CylinderTest.py:test_normalAt
    --- OR ----
    python -m nose -v ../test/CylinderTest.py:test_normalAt
    ---------------------------------------------------
    """
import numpy as np
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
Cone class helps to describe a cone with a center at point(0,0,0)
It inherits all elements from shape
Cone class contains the following functions:
__init__
localIntersect
localNormalAt
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ConeTest.py
--- OR ----
python3 -m nose -v ../test/ConeTest.py
--- OR ----
python -m nose -v ../test/ConeTest.py
---------------------------------------------------
"""
class Cone(Shape):
    # ---------------------
    """
    Cone class takes in a minimum and a maximum to describe the height of a cone,
    and a closed flag indicating whether the ends are capped
    """
    # ---------------------

    def __init__(self, minimum=float("-inf"), maximum=float("inf"), closed=False):
        super().__init__()
        self.minimum = minimum
        self.maximum = maximum
        self.closed = closed
    # ---------------------
    """
    Define equivalence of two Cone instances
    """
    # ---------------------

    def __eq__(self, cone2: "Cone"):
        if type(cone2).__name__ != "Cone":
            return False
        # the height bounds and the caps are part of a cone's identity too
        return (self.material == cone2.material
                and self.transform == cone2.transform
                and self.minimum == cone2.minimum
                and self.maximum == cone2.maximum
                and self.closed == cone2.closed)
    # ---------------------
    """
    Find the intersection between the ray and the cone
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections are listed
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        def checkCaps(t):
            # a cone's cap radius equals |y| of the cap plane
            x = ray.origin.x + t*ray.direction.x
            z = ray.origin.z + t*ray.direction.z
            yVal = max(self.maximum, self.minimum)
            return (x*x + z*z) <= yVal * yVal

        def intersectCap(xs):
            if not self.closed or abs(ray.direction.y) < 0.00001:
                return len(xs), xs
            t = (self.minimum - ray.origin.y)/ray.direction.y
            if checkCaps(t):
                xs.append(Intersection(t, self))
            t = (self.maximum - ray.origin.y)/ray.direction.y
            if checkCaps(t):
                xs.append(Intersection(t, self))
            return len(xs), xs
        xs = []
        a = ray.direction.x ** 2 + ray.direction.z**2 - ray.direction.y ** 2
        b = 2*ray.origin.x*ray.direction.x + 2*ray.origin.z * \
            ray.direction.z - 2 * ray.origin.y * ray.direction.y
        c = ray.origin.x**2 + ray.origin.z**2 - ray.origin.y**2
        if abs(a) < 0.00001 and abs(b) < 0.00001:
            # ray misses the double cone entirely; only the caps remain
            return intersectCap(xs)
        elif abs(a) < 0.00001:
            # degenerate (linear) case: ray parallel to one cone half.
            # NOTE(review): -c/(2*b) follows "The Ray Tracer Challenge";
            # plain algebra on a*t^2 + b*t + c = 0 with a == 0 gives -c/b —
            # confirm against the test suite before changing.
            xs.append(Intersection(-c/(2*b), self))
        else:
            disc = b*b-4*a*c
            if disc < 0:
                return 0, []
            t0 = (-b-disc**0.5)/(2*a)
            t1 = (-b+disc**0.5)/(2*a)
            if t0 > t1:
                t0, t1 = t1, t0
            # keep only wall hits between the height bounds
            y0 = ray.origin.y + t0*ray.direction.y
            if self.minimum < y0 < self.maximum:
                xs.append(Intersection(t0, self))
            y1 = ray.origin.y + t1*ray.direction.y
            if self.minimum < y1 < self.maximum:
                xs.append(Intersection(t1, self))
        return intersectCap(xs)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/ConeTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/ConeTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/ConeTest.py:test_intersect
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the normal at a certain point of the cone
    ---- Inputs: --------
        * point: a Tuple, indicating a point on the cone
    ---- Outputs: --------
        * vector: the normal vector
    """
    # ---------------------

    def localNormalAt(self, point: "Tuple"):
        dist = point.x * point.x + point.z * point.z
        # cap normals point straight up/down
        if dist < 1 and point.y >= self.maximum-0.00001:
            return Tuple.vector(0, 1, 0)
        elif dist < 1 and point.y <= self.minimum + 0.00001:
            return Tuple.vector(0, -1, 0)
        # wall normal: the y component has the sign opposite to the point's half
        y = dist ** 0.5
        if point.y > 0:
            y = -y
        return Tuple.vector(point.x, y, point.z)
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/ConeTest.py:test_normalAt
    --- OR ----
    python3 -m nose -v ../test/ConeTest.py:test_normalAt
    --- OR ----
    python -m nose -v ../test/ConeTest.py:test_normalAt
    ---------------------------------------------------
    """
import numpy as np
from RenderPy.Tuple import Tuple
from RenderPy.Color import Color
from RenderPy.Light import Light
from RenderPy.Pattern import Pattern
from RenderPy.Matrix import Matrix
# ---------------------
"""
Material class describes the material of a shape based on the Phong Reflection Model
    Each material contains the following elements:
1. color: a Color, the color of the material
2. ambient: a float, describe the ambient color of the material
3. diffuse: a float, describe the diffuse color of the material
4. specular: a float, describe the specular color of the material
5. shininess: a float, describe the shineness of the material
6. pattern: a Pattern, describe the pattern of the material
7. reflective: a float, indicates the reflectivity of the material
Material class contains the following functions:
__init__
__str__
__eq__
lighting
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MaterialTest.py
--- OR ----
python3 -m nose -v ../test/MaterialTest.py
--- OR ----
python -m nose -v ../test/MaterialTest.py
---------------------------------------------------
"""
class Material():
    # ---------------------
    """
    Material takes in nine parameters
    """
    # ---------------------

    def __init__(self, color: "Color" = Color(1, 1, 1), ambient: float = 0.1, diffuse: float = 0.9, specular: float = 0.9, shininess: float = 200, pattern: "Pattern" = None, reflective: float = 0, transparency: float = 0, refractiveIndex: float = 1):
        self.color = color
        self.ambient = ambient
        self.diffuse = diffuse
        self.specular = specular
        self.shininess = shininess
        self.pattern = pattern
        self.reflective = reflective
        self.transparency = transparency
        self.refractiveIndex = refractiveIndex
    # ---------------------
    """
    Define the output format for Material class
    """
    # ---------------------

    def __str__(self):
        return "Color: " + str(self.color) + "\nAmbient: " + str(self.ambient) + "\nDiffuse: " + str(self.diffuse) + "\nSpecular: " + str(self.specular) + "\nShininess: " + str(self.shininess)+"\nPattern: \n"+str(self.pattern)+"\nReflective: "+str(self.reflective)+"\nTransparency: "+str(self.transparency)+"\nRefractive Index: "+str(self.refractiveIndex)
    # ---------------------
    """
    Define equivalence of two Material instances
    """
    # ---------------------

    def __eq__(self, material2: "Material"):
        return self.color == material2.color and self.ambient == material2.ambient and self.diffuse == material2.diffuse and self.specular == material2.specular and self.shininess == material2.shininess and self.pattern == material2.pattern
    # ---------------------
    """
    Get the final color when the shape is shined by a light
    (Phong reflection model: ambient + diffuse + specular)
    ---- Inputs: --------
        * light: a Light
        * point: a Tuple, the place where we are looking at for the light and shape intersection
        * eyev: a Tuple, the position of our eye
        * normalv: a Tuple, the normal vector
        * inShadow: a bool, indicate whether there is a shadow
        * transform: a Matrix, the transform matrix of the shape, used for calculating pattern
          (defaults to the identity matrix when omitted)
    ---- Outputs: --------
        * color: a Color, the shaded color at the point
    """
    # ---------------------

    def lighting(self, light: "Light", point: "Tuple", eyev: "Tuple", normalv: "Tuple", inShadow: bool = False, transform: "Matrix" = None):
        # build the identity lazily instead of sharing one default instance
        if transform is None:
            transform = Matrix(matrix=np.eye(4))
        # resolve the surface color WITHOUT mutating self.color:
        # lighting is a query and must not permanently alter the material
        if self.pattern is not None:
            color = self.pattern.patternAtObject(point, transform)
        else:
            color = self.color
        effectiveColor = color * light.intensity
        lightV = (light.position-point).normalize()
        ambient = effectiveColor * self.ambient
        # in shadow only the ambient term contributes
        if inShadow:
            return ambient
        lightDotNormal = lightV.dot(normalv)
        # calculate diffuse and specular
        if lightDotNormal < 0:
            # light is on the other side of the surface
            diffuse = Color(0, 0, 0)
            specular = Color(0, 0, 0)
        else:
            diffuse = effectiveColor * self.diffuse * lightDotNormal
            reflect = ~lightV.reflectV(normalv)
            reflectDotEye = reflect.dot(eyev)
            # <= 0 (not == 0): a reflection pointing away from the eye gives no
            # specular highlight; a negative base raised to a float shininess
            # would also produce a complex number in Python 3
            if reflectDotEye <= 0:
                specular = Color(0, 0, 0)
            else:
                factor = reflectDotEye**self.shininess
                specular = light.intensity * self.specular * factor
        return ambient + diffuse + specular
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/MaterialTest.py:test_lighting
    --- OR ----
    python3 -m nose -v ../test/MaterialTest.py:test_lighting
    --- OR ----
    python -m nose -v ../test/MaterialTest.py:test_lighting
    ---------------------------------------------------
    """
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Material import Material
from RenderPy.Intersection import Intersection
# ---------------------
"""
Sphere class helps to describe a sphere with a center at point(0,0,0)
It inherits all elements from shape
It contains a center element
center: a Tuple marks the center of the sphere
Sphere class contains the following functions:
__init__
localIntersect
localNormalAt
glassSphere
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/SphereTest.py
--- OR ----
python3 -m nose -v ../test/SphereTest.py
--- OR ----
python -m nose -v ../test/SphereTest.py
---------------------------------------------------
"""
class Sphere(Shape):
    # ---------------------
    """
    Sphere class takes in no input
    center is the center point of sphere
    """
    # ---------------------

    def __init__(self):
        super().__init__()
    # ---------------------
    """
    Define equivalence of two Sphere instances
    """
    # ---------------------

    def __eq__(self, sphere2: "Sphere"):
        if type(sphere2).__name__ != "Sphere":
            return False
        sameCenter = self.center == sphere2.center
        sameMaterial = self.material == sphere2.material
        sameTransform = self.transform == sphere2.transform
        return sameCenter and sameMaterial and sameTransform
    # ---------------------
    """
    Find the intersection between the ray and the sphere
    by solving the quadratic |origin + t*direction - center|^2 = 1
    ---- Inputs: --------
        * ray: a Ray
    ---- Outputs: --------
        * count: a scalar, the number of intersections
        * results: a list, all intersections are listed
    """
    # ---------------------

    def localIntersect(self, ray: "Ray"):
        originToCenter = ray.origin - self.center
        # quadratic coefficients a*t^2 + b*t + c = 0
        a = ray.direction.dot(ray.direction)
        b = 2 * ray.direction.dot(originToCenter)
        c = originToCenter.dot(originToCenter) - 1
        discriminant = b * b - 4 * a * c
        if discriminant < 0:
            # ray misses the sphere entirely
            return 0, []
        root = discriminant ** 0.5
        near = (-b - root) / (2 * a)
        if discriminant == 0:
            # ray is tangent: a single grazing hit
            return 1, [Intersection(near, self)]
        far = (-b + root) / (2 * a)
        return 2, [Intersection(near, self), Intersection(far, self)]
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/SphereTest.py:test_intersect
    --- OR ----
    python3 -m nose -v ../test/SphereTest.py:test_intersect
    --- OR ----
    python -m nose -v ../test/SphereTest.py:test_intersect
    ---------------------------------------------------
    """

    # ---------------------
    """
    Find the normal at a certain point of the sphere
    ---- Inputs: --------
        * point: a Tuple, indicating a point on the sphere
    ---- Outputs: --------
        * vector: the normal vector
    """
    # ---------------------

    def localNormalAt(self, point: "Tuple"):
        # a unit sphere's normal is simply the vector from its center to the point
        return point - self.center
    # -----------------
    """
    Make sure you are on ~/src
    ---------------------------------------------------
    nosetests -v ../test/SphereTest.py:test_normalAt
    --- OR ----
    python3 -m nose -v ../test/SphereTest.py:test_normalAt
    --- OR ----
    python -m nose -v ../test/SphereTest.py:test_normalAt
    ---------------------------------------------------
    """

    # ---------------------
    """
    glassSphere method creates a glass sphere with transparency of 1 and refractive index 1.5
    ---- Outputs: --------
        * s: a Sphere
    """
    # ---------------------

    @staticmethod
    def glassSphere():
        sphere = Sphere()
        sphere.material = Material(transparency=1, refractiveIndex=1.5)
        return sphere
import numpy as np
from RenderPy.Shape import Shape
from RenderPy.Tuple import Tuple
from RenderPy.Intersection import Intersection
# ---------------------
"""
CSG class helps to describe a shape that is built by some combinations of two shapes
The combinations it supports are union, intersection and difference
It inherits all elements from shape
This files contains two parts:
includes function
CSG class
CSG class contains the following functions:
__init__
intersectionAllowed
filterIntersection
localIntersect
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CSGTest.py
--- OR ----
python3 -m nose -v ../test/CSGTest.py
--- OR ----
python -m nose -v ../test/CSGTest.py
---------------------------------------------------
"""
# ---------------------
"""
includes check whehter shape1 includes shape2
---- Inputs: --------
* s1: a Shape, shape1
* s2: a Shape, shape2
---- Outputs: --------
* includes: a bool, whether shape1 includes shape2
"""
# ---------------------
def includes(s1, s2):
if type(s1).__name__ == "CSG":
return s1.left.includes(s2) or s1.right.includes(s2)
if type(s1).__name__ == "Group":
for i in s1.objects:
if i.includes(s2):
return True
return False
return s1 == s2
# -----------------
"""
Make sure you are on ~/src
this is tested within filterIntersection in CSG class
"""
class CSG(Shape):
# ---------------------
"""
Cube class takes in no input
"""
# ---------------------
def __init__(self, shape1, shape2, operation: "str"):
super().__init__()
self.left = shape1
self.right = shape2
self.operation = operation
shape1.parent = self
shape2.parent = self
# ---------------------
"""
Define equivalence of two Cube instances
"""
# ---------------------
def __eq__(self, csg2: "CSG"):
if type(csg2).__name__ != "CSG":
return False
return self.s1 == csg2.s1 and self.s2 == csg2.s2 and self.operation == csg2.operation
# ---------------------
"""
Intersection allowed helps to determine whehter there is a intersection
---- Inputs: --------
* lhit: a boolean, indicate whether there is a left hit
* inl: a boolean, indicate whether there is a inner left hit
* inr: a boolean, indicates whether there is a inner right hit
---- Outputs: --------
* allowed: a boolean, indicates whether the intersection is allowed
"""
# ---------------------
def intersectionAllowed(self, lhit: bool, inl: bool, inr: bool):
if self.operation == "union":
return (lhit and not inr) or (not lhit and not inl)
elif self.operation == "intersect":
return (lhit and inr) or (not lhit and inl)
elif self.operation == "difference":
return (lhit and not inr) or (not lhit and inl)
return False
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CubeTest.py:test_intersectionAllowed
--- OR ----
python3 -m nose -v ../test/CubeTest.py:test_intersectionAllowed
--- OR ----
python -m nose -v ../test/CubeTest.py:test_intersectionAllowed
---------------------------------------------------
"""
# ---------------------
"""
filterIntersection helps to find the valid intersections based on intersectionAllowed
---- Inputs: --------
* xs: a list of Intersections
---- Outputs: --------
* result: a list of Intersections
"""
# ---------------------
def filterIntersection(self, xs):
inl = False
inr = False
result = []
for i in xs:
lhit = includes(self.left, i.shape)
tmp = self.intersectionAllowed(lhit, inl, inr)
if self.intersectionAllowed(lhit, inl, inr):
result.append(i)
if lhit:
inl = not inl
else:
inr = not inr
return result
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CubeTest.py:test_filterIntersection
--- OR ----
python3 -m nose -v ../test/CubeTest.py:test_filterIntersection
--- OR ----
python -m nose -v ../test/CubeTest.py:test_filterIntersection
---------------------------------------------------
"""
# ---------------------
"""
Find the intersection between the ray and the cube
---- Inputs: --------
* ray: a Ray
---- Outputs: --------
* count: a scalar, the number of intersections
* results: a tuple, all intersections are listed
"""
# ---------------------
def localIntersect(self, ray: "Ray"):
countLeft, leftXs = self.left.intersect(ray)
countRight, rightXs = self.right.intersect(ray)
xs = leftXs + rightXs
xs = sorted(xs, key=lambda x: x.t)
result = self.filterIntersection(xs)
return len(result), result
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CubeTest.py:test_intersect
--- OR ----
python3 -m nose -v ../test/CubeTest.py:test_intersect
--- OR ----
python -m nose -v ../test/CubeTest.py:test_intersect
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/CSG.py | CSG.py |
import abc
import numpy as np
from RenderPy.Tuple import Tuple
from RenderPy.Matrix import Matrix
from RenderPy.Ray import Ray
from RenderPy.Material import Material
# ---------------------
"""
Shape class is a parent class containing all necessary compnents of a shape.
Sphere and all other specific classes inherits it.
It contains the following elements:
1. transform: a Matrix, recording the transform matrix
2. material: a Material, recording the material of the shape
3. parent: a Shape, the parent of a shape in a group
Shape class contains the following functions:
__init__
__str__
intersect
localIntersect: an abstract method, would be implemented by other classes
normalAt
localNormalAt: an abstract method, would be implemented by other classes
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/ShapeTest.py
--- OR ----
python3 -m nose -v ../test/ShapeTest.py
--- OR ----
python -m nose -v ../test/ShapeTest.py
---------------------------------------------------
"""
class Shape():
# ---------------------
"""
Shape class takes in no input
"""
# ---------------------
def __init__(self):
self.center = Tuple.point(0, 0, 0)
self.material = Material()
self.transform = Matrix(matrix=np.eye(4))
self.parent = None
# ---------------------
"""
Define the output format for Shape class
"""
# ---------------------
def __str__(self):
return "The transform matrix is: \n" + str(self.transform) + "\n" + "The material is as the following: \n" + str(self.material) + "\n"
# ---------------------
"""
Find the intersection between the ray and the shape with the world axis
---- Inputs: --------
* ray: a Ray with world axis
---- Outputs: --------
* count: a scalar, the number of intersections
* results: a tuple, all intersections are listed
"""
# ---------------------
def intersect(self, ray: "Ray"):
localRay = ray.transform(~self.transform)
return self.localIntersect(localRay)
# ---------------------
"""
Make sure you are on ~/src
use the test of each different shape's intersect function
"""
# ---------------------
"""
Find the intersection with the shape as the main axis and center at the origin
---- Inputs: --------
* ray: a Ray with sphere axis
---- Outputs: --------
* count: a scalar, the number of intersections
* results: a tuple, all intersections are listed
"""
# ---------------------
@abc.abstractmethod
def localIntersect(self, ray: "Ray"):
raise NotImplementedError
# ---------------------
"""
Make sure you are on ~/src
this will be tested by all of the intersect function
"""
# ---------------------
"""
Find the normal between the ray and the shape with the world axis
---- Inputs: --------
* ray: a Ray with world axis
---- Outputs: --------
* count: a scalar, the number of intersections
* results: a tuple, all intersections are listed
"""
# ---------------------
def normalAt(self, point: "Tuple", **kwargs):
localPoint = self.worldToObject(point)
if "hit" in kwargs:
localNormal = self.localNormalAt(localPoint, hit=kwargs["hit"])
else:
localNormal = self.localNormalAt(localPoint)
return self.normalToWorld(localNormal)
# ---------------------
"""
Make sure you are on ~/src
use the test of each different shape's normalAt function
also, we have a specific test for group normal at the following:
---------------------------------------------------
nosetests -v ../test/ShapeTest.py:test_normalAt
--- OR ----
python3 -m nose -v ../test/ShapeTest.py:test_normalAt
--- OR ----
python -m nose -v ../test/ShapeTest.py:test_normalAt
---------------------------------------------------
"""
# ---------------------
"""
Find the normal with the shape as the main axis and center at the origin
---- Inputs: --------
* ray: a Ray with sphere axis
---- Outputs: --------
* count: a scalar, the number of intersections
* results: a tuple, all intersections are listed
"""
# ---------------------
@abc.abstractmethod
def localNormalAt(self, point: "Tuple", **kwargs):
raise NotImplementedError
# ---------------------
"""
Make sure you are on ~/src
this will be tested with all of normalAt function
"""
# ---------------------
"""
World to object converts the normal at the point of the shape to the shape axis
---- Inputs: --------
* point: a Tuple, indicating a point on the Shape
---- Outputs: --------
* point: a Tuple, the converted point
"""
# ---------------------
def worldToObject(self, point: "Tuple"):
if self.parent != None:
point = self.parent.worldToObject(point)
return ~self.transform * point
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/GroupTest.py:test_worldToObject
--- OR ----
python3 -m nose -v ../test/GroupTest.py:test_worldToObject
--- OR ----
python -m nose -v ../test/GroupTest.py:test_worldToObject
---------------------------------------------------
"""
# ---------------------
"""
Normal to World converts the normal at the point of the shape to the world axis
---- Inputs: --------
* nromal: a Tuple, indicating a point on the Shape
---- Outputs: --------
* normal: a Tuple, the converted normal vector
"""
# ---------------------
def normalToWorld(self, normal: "Tuple"):
normal = (~self.transform).transpose() * normal
normal.w = 0
normal.arr[3] = 0
normal = normal.normalize()
if self.parent != None:
normal = self.parent.normalToWorld(normal)
return normal
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/GroupTest.py:test_normalToWorld
--- OR ----
python3 -m nose -v ../test/GroupTest.py:test_normalToWorld
--- OR ----
python -m nose -v ../test/GroupTest.py:test_normalToWorld
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Shape.py | Shape.py |
import os
import numpy as np
from PIL import Image
from RenderPy.Color import Color
# ---------------------
"""
Canvas class helps to describe a set of pixel in grids that help generate images.
Each canvas contains 2 elements: weight ,height
width, height are all float value
width: defining the number of columns, height: defining the number of rows
Canvas class contains the following functions:
__init__
__eq__
pixelAt
writePixel
canvasToPPM
canvasToPNG
saveImage
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CanvasTest.py
--- OR ----
python3 -m nose -v ../test/CanvasTest.py
--- OR ----
python -m nose -v ../test/CanvasTest.py
---------------------------------------------------
"""
class Canvas():
# ---------------------
"""
Canvas class takes in two numbers
w is width, h is height
"""
# ---------------------
def __init__(self, w: int, h: int):
self.width = w
self.height = h
self.canv = [[Color() for _ in range(w)] for _ in range(h)]
# ---------------------
"""
Define equivalence of two Canvas instances
"""
# ---------------------
def __eq__(self, canvas2: "Canvas"):
if self.width == canvas2.width and self.height == canvas2.height:
for i in range(self.height):
for j in range(self.width):
if self.canv[i][j] != canvas2.canv[i][j]:
return False
return True
return False
# ---------------------
"""
Get the color of a given pixel
---- Inputs: --------
* cl: A float indicating the column number of where the pixel is at
* rw: A float indicating the row number of where the pixel is at
---- Outputs: --------
* Color: the color at the pixel
"""
# ---------------------
def pixelAt(self, cl: int, rw: int):
return self.canv[rw][cl]
# ---------------------
"""
Change the color of a given pixel
---- Inputs: --------
* cl: A float indicating the column number of where the pixel is at
* rw: A float indicating the row number of where the pixel is at
* color: A Color wanted to be at the pixel
"""
# ---------------------
def writePixel(self, cl: int, rw: int, color: "Color"):
self.canv[rw][cl] = color
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CanvasTest.py:test_writePixel
--- OR ----
python3 -m nose -v ../test/CanvasTest.py:test_writePixel
--- OR ----
python -m nose -v ../test/CanvasTest.py:test_writePixel
---------------------------------------------------
"""
# ---------------------
"""
Convert the canvas to ppm formatted images
Generally existing PPM softwares accept a line more than 70 characters,
but there are some needs to have each line having less than or equal to 70 characters
We also need a new line at the end of the string
---- Outputs: --------
* result: A string containing the final ppm file
"""
# ---------------------
def canvasToPPM(self):
result = "P3\n"+str(self.width) + " " + str(self.height) + "\n255\n"
for row in self.canv:
temp = ""
for pix in row:
# providing a conversion from 0 to 1 to 255 scale
# if greater than 1, we read it as 1
# if smaller than 0, we read it as 0
def setColor(color):
if color >= 1:
return 255
elif color <= 0:
return 0
else:
return int(round(color * 255, 0))
red = str(setColor(pix.r))
green = str(setColor(pix.g))
blue = str(setColor(pix.b))
# for each color, if the existing line adding 1 to 3 characters
# we cut it off and strip the last space and add a new line
# so that we fulfill the 70 character requirment and do not cut off a color
if len(temp) + len(red) > 70:
result += temp[:-1] + "\n"
temp = ""
temp += red + " "
if len(temp) + len(green) > 70:
result += temp[:-1] + "\n"
temp = ""
temp += green + " "
if len(temp) + len(blue) > 70:
result += temp[:-1] + "\n"
temp = ""
temp += blue + " "
temp = temp[:-1] + "\n"
result += temp
return result
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CanvasTest.py:test_canvasToPPM
--- OR ----
python3 -m nose -v ../test/CanvasTest.py:test_canvasToPPM
--- OR ----
python -m nose -v ../test/CanvasTest.py:test_canvasToPPM
---------------------------------------------------
"""
# ---------------------
"""
Convert the canvas to a numpy array in order to call PIL.image to convert it to png image
---- Outputs: --------
* result: A numpy array of size (h,w,3)
"""
# ---------------------
def canvasToPNG(self):
result = []
for rw in range(self.height):
row = []
for cl in range(self.width):
cur = np.rint(self.pixelAt(cl, rw).arr*255)
if cur[0] > 255:
cur[0] = 255
elif cur[0] < 0:
cur[0] = 0
if cur[1] > 255:
cur[1] = 255
elif cur[1] < 0:
cur[1] = 0
if cur[2] > 255:
cur[2] = 255
elif cur[2] < 0:
cur[2] = 0
row.append(cur)
result.append(row)
result = np.array(result)
result = result.astype(np.uint8)
return result
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/CanvasTest.py:test_canvasToPNG
--- OR ----
python3 -m nose -v ../test/CanvasTest.py:test_canvasToPNG
--- OR ----
python -m nose -v ../test/CanvasTest.py:test_canvasToPNG
---------------------------------------------------
"""
# ---------------------
"""
Save the result string from canvasToPPM to ppm file
---- Inputs: --------
* filename: A string indicating the file name you want for the image
* directory: default is the images folder, or a specefic one of your choice
"""
# ---------------------
def saveImage(self, filename: str, directory: str = "../images/", fileType="png"):
if not os.path.isdir(directory):
os.mkdir(directory)
path = directory + filename + "." + fileType
if fileType == "ppm":
result = self.canvasToPPM()
f = open(path, "w")
f.write(result)
f.close()
else:
result = self.canvasToPNG()
img = Image.fromarray(result, 'RGB')
img.save(path)
print(
filename + " written successfully, please take a look at folder " + directory)
# -----------------
"""
Go to your chosen folder to see whether the image is what you want!
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Canvas.py | Canvas.py |
import math
import numpy as np
from RenderPy.Tuple import Tuple
# ---------------------
"""
Matrix class helps to describe a set of elements.
Each matrix contains 3 elements: weight ,height, matrix
width, height are all float value
width: defining the number of columns, height: defining the number of rows
matrix: defining a numpy matrix containing necessary numbers
Note: matrix class contains several transformation matrices.
In order to chain them, we need to have the last operation matrix multiply the previous matrix
Matrix class contains the following functions:
__init__
__str__
__eq__
__mul__
__invert__
identity
determinant
translation
scaling
rotateX
rotateY
rotateZ
shearing
viewTransform
"""
# ---------------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py
--- OR ----
python3 -m nose -v ../test/MatrixTest.py
--- OR ----
python -m nose -v ../test/MatrixTest.py
---------------------------------------------------
"""
class Matrix():
# ---------------------
"""
Matrix class takes in two numbers
w is width, h is height, matrix is a matrix
"""
# ---------------------
def __init__(self, w: int = None, h: int = None, matrix: np.ndarray = None):
if w == h == None:
try:
if matrix.size != 0:
self.matrix = matrix
self.width = matrix.shape[1]
self.height = matrix.shape[0]
except:
self.width = 0
self.height = 0
self.matrix = np.zeros((1, 1))
else:
self.width = w
self.height = h
self.matrix = np.zeros((h, w))
# ---------------------
"""
Define the output format for Matrix class
"""
# ---------------------
def __str__(self):
return str(self.matrix)
# ---------------------
"""
Define equivalence of two Matrix instances
This is based on numpy allclose function with absolute tolerance 0.00001
"""
# ---------------------
def __eq__(self, matrix2: "Matrix"):
return np.allclose(self.matrix, matrix2.matrix, atol=0.0001)
# ---------------------
"""
Define the multiplication between two Matrix (Cross product)
This helps to perform transformations
Define the multiplication between matrix and vector
Define the multiplication between matrix and scalar
Order matters, it is not interchangable
---- Inputs: --------
* value: A Matrix or a Tuple or a float
---- Outputs: --------
* Matrix: the result from matrix multiplication or a tuple from matrix and tuple multiplication or a matrix from matrix and scalar multiplication
"""
# ---------------------
def __mul__(self, value):
if type(value) == float:
return Matrix(matrix=self.matrix*value)
elif type(value).__name__ == "Tuple":
return Tuple(arr=np.matmul(self.matrix, value.arr))
return Matrix(matrix=np.matmul(self.matrix, value.matrix))
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_mul
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_mul
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_mul
---------------------------------------------------
"""
# ---------------------
"""
This calculates the inversion of a matrix
---- Outputs: --------
* Matrix: the inverse of the matrix
"""
# ---------------------
def __invert__(self):
return Matrix(matrix=np.linalg.inv(self.matrix))
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_inversion
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_inversion
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_inversion
---------------------------------------------------
"""
# ---------------------
"""
Define the identity based on the matrix's shape
---- Outputs: --------
* Matrix: the identity matrix
"""
# ---------------------
def identity(self):
return Matrix(matrix=np.eye(self.height))
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_identity
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_identity
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_identity
---------------------------------------------------
"""
# ---------------------
"""
Obtain the transposed matrix of the original one
Transpose means flipping the matrix over the left diagonal
---- Outputs: --------
* Matrix: the transposed matrix
"""
# ---------------------
def transpose(self):
return Matrix(matrix=self.matrix.T)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_transpose
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_transpose
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_transpose
---------------------------------------------------
"""
# ---------------------
"""
Obtain the determinant of the matrix
It is used to determine whether this matrix has an inversion
---- Outputs: --------
* result: a scalar, the determinant
"""
# ---------------------
def determinant(self):
return np.linalg.det(self.matrix)
# -----------------
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_deter
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_deter
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_deter
---------------------------------------------------
"""
# ---------------------
"""
Translation matrix helps to transform a point to a desired location
However, it does not work for vectors
Inverse of the translation matrix means working on the opposite direction
translation is a static method
---- Inputs: --------
* x: the amount wants to transform on x-axis
* y: the amount wants to transform on y-axis
* z: the amount wants to transform on z-axis
---- Outputs: --------
* Matrix: the translation matrix
"""
# ---------------------
@staticmethod
def translation(x: float, y: float, z: float):
m = np.eye(4)
m[0, 3] = x
m[1, 3] = y
m[2, 3] = z
return Matrix(matrix=m)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_translation
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_translation
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_translation
---------------------------------------------------
"""
# ---------------------
"""
Scale matrix helps to transform the size of an object
Inverse of the scaling matrix means scaling by the inverse of the input
scaling is a static method
---- Inputs: --------
* x: the amount wants to be scaled on x-axis
* y: the amount wants to be scaled on y-axis
* z: the amount wants to be scaled on z-axis
---- Outputs: --------
* Matrix: the scaling matrix
"""
# ---------------------
@staticmethod
def scaling(x: float, y: float, z: float):
m = np.eye(4)
m[0, 0] = x
m[1, 1] = y
m[2, 2] = z
return Matrix(matrix=m)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_scaling
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_scaling
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_scaling
---------------------------------------------------
"""
# ---------------------
"""
RotateX matrix helps to rotate the object according to x-axis
Inverse of the matrix means rotating at an opposite direction
rotateX is a static method
---- Inputs: --------
* angle: the rotate angle in radian
---- Outputs: --------
* Matrix: the rotation matrix
"""
# ---------------------
@staticmethod
def rotateX(theta: float):
m = np.eye(4)
m[1, 1] = math.cos(theta)
m[1, 2] = -math.sin(theta)
m[2, 1] = math.sin(theta)
m[2, 2] = math.cos(theta)
return Matrix(matrix=m)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_rotateX
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_rotateX
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_rotateX
---------------------------------------------------
"""
# ---------------------
"""
RotateY matrix helps to rotate the object according to y-axis
Inverse of the matrix means rotating at an opposite direction
rotateY is a static method
---- Inputs: --------
* angle: the rotate angle in radian
---- Outputs: --------
* Matrix: the rotation matrix
"""
# ---------------------
@staticmethod
def rotateY(theta: float):
m = np.eye(4)
m[0, 0] = math.cos(theta)
m[0, 2] = math.sin(theta)
m[2, 0] = -math.sin(theta)
m[2, 2] = math.cos(theta)
return Matrix(matrix=m)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_rotateY
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_rotateY
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_rotateY
---------------------------------------------------
"""
# ---------------------
"""
RotateZ matrix helps to rotate the object according to z-axis
Inverse of the matrix means rotating at an opposite direction
rotateZ is a static method
---- Inputs: --------
* angle: the rotate angle in radian
---- Outputs: --------
* Matrix: the rotation matrix
"""
# ---------------------
@staticmethod
def rotateZ(theta: float):
m = np.eye(4)
m[0, 0] = math.cos(theta)
m[0, 1] = -math.sin(theta)
m[1, 0] = math.sin(theta)
m[1, 1] = math.cos(theta)
return Matrix(matrix=m)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_rotateY
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_rotateY
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_rotateY
---------------------------------------------------
"""
# ---------------------
"""
Shearing matrix helps to slant a straight line.
X-coordinates would change in proportion to Y-axis and Z-axis,
Y-coordinates changes in proportion to X-axis and Z-axis,
Z-coordinates changes in proportion to X-axis and Y-axis.
Inverse of the matrix means shifting the line on an opposite direction
shearing is a static method
---- Inputs: --------
* xy: a float, the change of X-coordinates in proportion to Y-axis
* xz: a float, the change of X-coordinates in proportion to Z-axis
* yx: a float, the change of Y-coordinates in proportion to X-axis
* yz: a float, the change of Y-coordinates in proportion to Z-axis
* zx: a float, the change of Z-coordinates in proportion to X-axis
* zy: a float, the change of Z-coordinates in proportion to Y-axis
---- Outputs: --------
* Matrix: the rotation matrix
"""
# ---------------------
@staticmethod
def shearing(xy: float, xz: float, yx: float, yz: float, zx: float, zy: float):
m = np.eye(4)
m[0, 1] = xy
m[0, 2] = xz
m[1, 0] = yx
m[1, 2] = yz
m[2, 0] = zx
m[2, 1] = zy
return Matrix(matrix=m)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_shearing
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_shearing
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_shearing
---------------------------------------------------
"""
# ---------------------
"""
View transformation matrix helps to transform the camera
Inverse of the view transform matrix means undo the transformation on the camera
viewTransformation is a static method
---- Inputs: --------
* fromV: a Point, indicating where the eye is in the scene
* toV: a Point, indicating the direction the eye is looking at the scene
* upV: a Vector, indicating which direction is up in the scene
---- Outputs: --------
* Matrix: the view transformation matrix
"""
# ---------------------
@staticmethod
def viewTransformation(fromV: "Tuple", toV: "Tuple", upV: "Tuple"):
forward = (toV-fromV).normalize()
upn = upV.normalize()
left = forward.cross(upn)
trueUp = left.cross(forward)
orientation = Matrix(matrix=np.array([
[left.x, left.y, left.z, 0],
[trueUp.x, trueUp.y, trueUp.z, 0],
[-forward.x, -forward.y, -forward.z, 0],
[0, 0, 0, 1]
]))
return orientation*Matrix.translation(-fromV.x, -fromV.y, -fromV.z)
"""
Make sure you are on ~/src
---------------------------------------------------
nosetests -v ../test/MatrixTest.py:test_viewTransformation
--- OR ----
python3 -m nose -v ../test/MatrixTest.py:test_viewTransformation
--- OR ----
python -m nose -v ../test/MatrixTest.py:test_viewTransformation
---------------------------------------------------
""" | 3dRenderPy | /3dRenderPy-0.0.5.tar.gz/3dRenderPy-0.0.5/src/RenderPy/Matrix.py | Matrix.py |
This is a simple Python package for hexapod IK calculations.
Commands:
To be imported as ikengine
class IKEngine # initialises the class object. Takes 4 arguments in mm - coxaLength, femurLength, tibiaLength and bodySideLength. It can optionally take a 5th argument, either a list or a tuple, containing the servo numbers that need to be reversed. Those servos will be reversed (angle = 180 - angle) for the whole runtime of any program that uses this library.
shift_lean(posX, posY, posZ, rotX, rotY, rotZ) # returns an array of 18 servo angles that are calculated using IK from the given variables that correspond to the translation and tilt of the body of the hexapod. The order goes from tibia to coxa, from left to right and then from front to back
Any questions or suggestions? Please feel free to contact me at macaquedev@gmail.com
| 3dof-hexapod-ik-generator | /3dof-hexapod-ik-generator-1.0.0.tar.gz/3dof-hexapod-ik-generator-1.0.0/README.md | README.md |
from math import cos, sin, tan, acos, atan, atan2, pi
from timeit import timeit
class IKEngine:
    """Inverse-kinematics engine for a hexapod with 3-DOF legs.

    All lengths are given in millimetres.  The equations assume six
    coxa/femur/tibia legs spaced equally around the centre of the body
    (legs numbered 1-6).
    """

    # Per-leg coxa angle corrections in degrees (legs 1..6), applied after the
    # raw IK solution to account for each leg's mounting orientation.
    _COXA_TRIM_DEG = (-60, 0, 60, -240, -180, -120)
    # Servo angles are emitted leg-by-leg in this order, three per leg:
    # (tibia, femur, coxa).
    _OUTPUT_LEG_ORDER = (6, 1, 5, 2, 4, 3)

    def __init__(self, coxaLength, femurLength, tibiaLength, bodySideLength,
                 reversed_servos_tuple=(19, 20, 21)):
        """Store the robot geometry and precompute per-leg rest positions.

        :param coxaLength: length of the coxa segment (mm)
        :param femurLength: length of the femur segment (mm)
        :param tibiaLength: length of the tibia segment (mm)
        :param bodySideLength: side length of the hexagonal body (mm)
        :param reversed_servos_tuple: output indices whose angle is mirrored.
            (Note: the default (19, 20, 21) lies outside the 18-entry output
            list and therefore reverses nothing.)
        """
        self._reversed_servos = reversed_servos_tuple
        self._coxaLength = coxaLength
        self._femurLength = femurLength
        self._tibiaLength = tibiaLength
        self._bodySideLength = bodySideLength
        # Half the body side.  The original used `bodySideLength >> 1`, which
        # raises TypeError for float inputs and floors odd integers; true
        # division matches the geometric intent (half the side length).
        off1 = bodySideLength / 2
        off2 = (bodySideLength ** 2 - off1 ** 2) ** 0.5
        self._bodyCenterOffset1 = off1
        self._bodyCenterOffset2 = off2
        # Offsets of each coxa joint from the body centre (legs 1..6).
        # NOTE(review): leg 5 uses +bodySideLength exactly like leg 2; symmetry
        # suggests it should be negative -- kept as in the original, confirm
        # against hardware.
        self._bodyCenterOffsetsX = (off1, bodySideLength, off1,
                                    -off1, bodySideLength, -off1)
        self._bodyCenterOffsetsY = (off2, 0, -off2, -off2, 0, off2)
        # Neutral foot positions: legs stretched horizontally, tibia pointing down.
        reach = coxaLength + femurLength
        x60 = cos(60 / 180 * pi) * reach
        y60 = sin(60 / 180 * pi) * reach
        self._feetPosX = (x60, reach, x60, -x60, -reach, -x60)
        self._feetPosY = (y60, 0, -y60, -y60, 0, y60)
        self._feetPosZ = (tibiaLength,) * 6

    def shift_lean(self, posX=50, posY=0, posZ=0, rotX=0, rotY=0, rotZ=0):
        """Solve body shift/lean IK and return the 18 servo angles.

        :param posX: body translation along x (mm)
        :param posY: body translation along y (mm)
        :param posZ: body translation along z (mm)
        :param rotX: body pitch (degrees)
        :param rotY: body yaw (degrees)
        :param rotZ: body roll (degrees)
        :return: list of 18 integer servo angles, ordered as
            (tibia, femur, coxa) for legs 6, 1, 5, 2, 4, 3
        """
        per_leg = {}
        for leg in range(6):
            # Foot position relative to the body centre after translation.
            totalX = self._feetPosX[leg] + self._bodyCenterOffsetsX[leg] + posX
            totalY = self._feetPosY[leg] + self._bodyCenterOffsetsY[leg] + posY
            distBodyCenterFeet = (totalY ** 2 + totalX ** 2) ** 0.5
            angleBodyCenterX = pi / 2 - atan2(totalX, totalY)
            # Height change induced at this foot by body roll/pitch.
            rollZ = tan(rotZ * pi / 180) * totalX
            pitchZ = tan(rotX * pi / 180) * totalY
            # Horizontal displacement induced by body yaw (rotY).
            bodyIKX = cos(angleBodyCenterX + rotY * pi / 180) * distBodyCenterFeet - totalX
            bodyIKY = sin(angleBodyCenterX + rotY * pi / 180) * distBodyCenterFeet - totalY
            bodyIKZ = rollZ + pitchZ
            # Final foot target in the leg's coordinate frame.
            newPosX = self._feetPosX[leg] + posX + bodyIKX
            newPosY = self._feetPosY[leg] + posY + bodyIKY
            newPosZ = self._feetPosZ[leg] + posZ + bodyIKZ
            coxaFeetDist = (newPosX ** 2 + newPosY ** 2) ** 0.5
            # Straight-line distance the femur/tibia pair must span.
            IKSW = ((coxaFeetDist - self._coxaLength) ** 2 + newPosZ ** 2) ** 0.5
            IKA1 = atan((coxaFeetDist - self._coxaLength) / newPosZ)
            # Law of cosines on the femur-tibia triangle.
            IKA2 = acos((self._tibiaLength ** 2 - self._femurLength ** 2 - IKSW ** 2)
                        / (-2 * IKSW * self._femurLength))
            TAngle = acos((IKSW ** 2 - self._tibiaLength ** 2 - self._femurLength ** 2)
                          / (-2 * self._femurLength * self._tibiaLength))
            tibia = 90 - TAngle * 180 / pi
            femur = 90 - (IKA1 + IKA2) * 180 / pi
            coxa = 90 - atan2(newPosX, newPosY) * 180 / pi + self._COXA_TRIM_DEG[leg]
            per_leg[leg + 1] = (tibia, femur, coxa)
        # Flatten in the fixed servo output order.
        angles = []
        for leg in self._OUTPUT_LEG_ORDER:
            angles.extend(per_leg[leg])
        # Shift into the servo range and round to whole degrees.
        angles = [round(a + 90) for a in angles]
        # Mirror any servos that are mounted backwards.
        for idx in set(self._reversed_servos):
            if 0 <= idx < len(angles):
                angles[idx] = 180 - angles[idx]
        return angles
if __name__ == '__main__':
    # Quick manual demo of the solver.
    a = IKEngine(50, 115, 90, 80)
    # NOTE(review): IKEngine defines no `cartesian_to_servo` method in this
    # module, so this call raises AttributeError at runtime -- was
    # `shift_lean` intended, or is a method missing from this file?
    a.cartesian_to_servo(0, 116, 116, 0)
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Dict, Optional, Union, Callable, Sequence
from abc import ABC, abstractmethod
from itertools import product
import functools
import numpy as np
# Linkage criteria accepted for cell-type-level distance aggregation.
LINKAGE_CELL_OPTIONS = ['single', 'complete', 'average']
# Linkage criteria accepted for region-level distances; the 'homolog_*'
# options pair up putative homologous cell types across the two regions.
LINKAGE_REGION_OPTIONS = ['single', 'complete', 'average', 'homolog_avg', 'homolog_mnn']
@dataclass
class Mergeable(ABC):
    """Abstract node of the agglomeration hierarchy: a cell type or a region."""

    # Unique identifier (cell-type IDs and region IDs live in separate spaces).
    id_num: int

    @property
    @abstractmethod
    def num_original(self) -> int:
        """Number of original (pre-merge) data points contained in this node."""
        pass

    @property
    @abstractmethod
    def region(self) -> int:
        """ID of the region this node belongs to."""
        pass

    @property
    @abstractmethod
    def transcriptome(self) -> np.ndarray:
        """2-D array of transcriptomes, one row per original data point."""
        pass

    # @classmethod
    # @abstractmethod
    # def merge(cls, m1: Mergeable, m2: Mergeable, dist: float, new_id: int, region_id: Optional[int] = None) -> Mergeable:
    #     pass

    # noinspection PyArgumentList
    @staticmethod
    def _pairwise_diff(lhs_transcriptome: np.ndarray, rhs_transcriptome: np.ndarray,
                       affinity: Callable, linkage: str) -> float:
        """Aggregate the pairwise affinities between two stacks of transcriptomes.

        :param lhs_transcriptome: (n, genes) array for the left-hand node
        :param rhs_transcriptome: (m, genes) array for the right-hand node
        :param affinity: callable mapping two 1-D transcriptomes to a distance
        :param linkage: 'single' (min), 'complete' (max), anything else -> mean
        :return: the aggregated distance
        """
        lhs_len = lhs_transcriptome.shape[0]
        rhs_len = rhs_transcriptome.shape[0]
        dists = np.zeros((lhs_len, rhs_len))
        # Compute distance matrix
        # essentially only useful if this is working on merged cell types
        # otherwise just produces a matrix containing one value
        for ct1_idx, ct2_idx in product(range(lhs_len), range(rhs_len)):
            dists[ct1_idx, ct2_idx] = affinity(lhs_transcriptome[ct1_idx], rhs_transcriptome[ct2_idx])
        if linkage == 'single':
            dist = dists.min()
        elif linkage == 'complete':
            dist = dists.max()
        else:  # default to 'average'
            dist = dists.mean()
        return dist

    @staticmethod
    @abstractmethod
    def diff(lhs: Mergeable, rhs: Mergeable, affinity: Callable, linkage: str,
             affinity2: Optional[Callable] = None, linkage2: Optional[str] = None,
             mask: Optional[Sequence] = None, mask2: Optional[Sequence] = None):
        """Distance between two nodes; see the concrete implementations."""
        pass
@dataclass
class CellType(Mergeable):
    """A (possibly merged) cell type: a stack of transcriptomes within one region."""

    _region: int  # ID of the region this cell type currently belongs to
    _transcriptome: np.ndarray  # (genes,) for an original cell type, (n, genes) once merged

    @property
    def transcriptome(self) -> np.ndarray:
        """The transcriptome stack, always returned as a 2-D (n, genes) array."""
        if len(self._transcriptome.shape) == 1:
            return self._transcriptome.reshape(1, -1)
        return self._transcriptome

    @property
    def num_original(self):
        """Number of original cell types stacked into this node (row count)."""
        return self.transcriptome.shape[0]

    @property
    def region(self):
        return self._region

    @region.setter
    def region(self, r: int):
        self._region = r

    @classmethod
    def merge(cls, m1: CellType, m2: CellType, new_id: int, region_id: Optional[int] = None) -> CellType:
        """Merge two cell types by stacking their transcriptomes.

        :param m1: first cell type
        :param m2: second cell type
        :param new_id: ID for the merged cell type
        :param region_id: target region; defaults to the (shared) region of m1/m2
        :return: the merged cell type
        """
        # must be in same region if not being created into a new region
        if region_id is None:
            assert m1.region == m2.region, \
                'Tried merging cell types from different regions without new target region.'
            region_id = m1.region
        # np.vstack replaces np.row_stack, which was removed in NumPy 2.0.
        return cls(new_id, region_id, np.vstack((m1.transcriptome, m2.transcriptome)))

    @staticmethod
    def diff(lhs: CellType, rhs: CellType, affinity: Callable, linkage: str,
             affinity2: Optional[Callable] = None, linkage2: Optional[str] = None,
             mask: Optional[Sequence] = None, mask2: Optional[Sequence] = None):
        """Distance between two cell types under the given affinity and linkage.

        Only ``affinity``, ``linkage`` and ``mask`` (a gene mask applied to both
        sides) are used here; the remaining parameters exist to match the shared
        ``Mergeable.diff`` signature.
        """
        lt = lhs.transcriptome if mask is None else lhs.transcriptome[:, mask]
        rt = rhs.transcriptome if mask is None else rhs.transcriptome[:, mask]
        return CellType._pairwise_diff(lt, rt, affinity, linkage)

    def __repr__(self):
        return f'{self.region}.{self.id_num}'
@dataclass
class Region(Mergeable):
    """A region: a dict of contained cell types, or (alternatively) a bare
    region-level transcriptome."""

    # Cell types currently contained in this region, keyed by cell-type ID.
    cell_types: Optional[Dict[int, CellType]] = field(default_factory=dict)
    # Optional region-level transcriptome; when set, region distances are
    # computed from it directly instead of from the contained cell types.
    _transcriptome: Optional[np.ndarray] = None

    @property
    def transcriptome(self) -> np.ndarray:
        """Region-level transcriptome as a 2-D array.

        :raises ValueError: if no region-level transcriptome was ever set
        """
        if self._transcriptome is None:
            raise ValueError(f'Transcriptome for region {self.id_num} never defined.')
        if len(self._transcriptome.shape) == 1:
            return self._transcriptome.reshape(1, -1)
        return self._transcriptome

    @property
    def child_transcriptomes(self) -> np.ndarray:
        """Transcriptomes of all contained cell types, stacked row-wise."""
        # ugly -- should refactor something
        ct_list = list(self.cell_types.values())
        transcriptome_length = ct_list[0].transcriptome.shape[1]
        transcriptomes = np.zeros((len(self.cell_types), transcriptome_length))
        for c in range(len(self.cell_types)):
            transcriptomes[c] = ct_list[c].transcriptome
        return transcriptomes

    @property
    def num_original(self):
        # Without a region-level transcriptome, count the originals contained
        # in the child cell types; otherwise use the transcriptome's row count.
        if self._transcriptome is None:
            return np.sum([ct.num_original for ct in self.cell_types.values()])
        else:
            return self.transcriptome.shape[0]

    @property
    def num_cell_types(self):
        # Number of cell types currently contained in this region.
        return len(self.cell_types)

    @property
    def region(self):
        # A region belongs to itself.
        return self.id_num

    # @classmethod
    # def merge(cls, m1: Region, m2: Region, dist: float, new_id: int, region_id: Optional[int] = None) -> Region:
    #     pass

    # noinspection PyArgumentList
    @staticmethod
    def diff(lhs: Region, rhs: Region, affinity: Callable, linkage: str,
             affinity2: Optional[Callable] = None, linkage2: Optional[str] = None,
             mask: Optional[np.ndarray] = None, mask2: Optional[Sequence] = None):
        """
        Compute the distance between two regions.

        :param lhs: The lhs region
        :param rhs: The rhs region
        :param affinity: Affinity for transcriptome comparisons for region distances
        :param linkage: Linkage for region distances
        :param affinity2: Affinity for transcriptome comparisons for cell types distances
        :param linkage2: Linkage for cell type distances
        :param mask: Region gene mask
        :param mask2: Cell type gene mask
        :return: dist, num_ct_diff
        """
        # Difference in number of cell types contained. Only really matters for homolog_mnn since it can change there
        num_ct_diff = np.abs(lhs.num_cell_types - rhs.num_cell_types)
        if (lhs._transcriptome is None) or (rhs._transcriptome is None):
            if (affinity2 is None) or (linkage2 is None):
                raise ValueError('Both affinity and linkage must be defined for cell types')
            # Cell type dists using cell type gene mask
            ct_dists = np.zeros((lhs.num_cell_types, rhs.num_cell_types))
            # Cell type dists using region gene mask
            r_ct_dists = np.zeros((lhs.num_cell_types, rhs.num_cell_types))
            r1_ct_list = list(lhs.cell_types.values())
            r2_ct_list = list(rhs.cell_types.values())
            for r1_idx, r2_idx in product(range(lhs.num_cell_types), range(rhs.num_cell_types)):
                # Use the cell type gene mask here because matching up sister cell types
                ct_dists[r1_idx, r2_idx] = CellType.diff(r1_ct_list[r1_idx], r2_ct_list[r2_idx],
                                                         affinity=affinity2, linkage=linkage2, mask=mask2)
                r_ct_dists[r1_idx, r2_idx] = CellType.diff(r1_ct_list[r1_idx], r2_ct_list[r2_idx],
                                                           affinity=affinity2, linkage=linkage2, mask=mask)
            if linkage == 'single':
                dist = r_ct_dists.min()
            elif linkage == 'complete':
                dist = r_ct_dists.max()
            elif linkage == 'homolog_avg':
                # Greedily pair the closest cell types (under the cell-type
                # mask) and average their distances under the region mask.
                dists = []
                for i in range(np.min(ct_dists.shape)):
                    ct_min1_idx, ct_min2_idx = np.unravel_index(np.argmin(ct_dists), ct_dists.shape)
                    # Add the distance between the two closest cell types (can consider as homologs)
                    dists.append(r_ct_dists[ct_min1_idx, ct_min2_idx])
                    # Delete these two homologs from the distance matrix
                    ct_dists = np.delete(ct_dists, ct_min1_idx, axis=0)
                    ct_dists = np.delete(ct_dists, ct_min2_idx, axis=1)
                    r_ct_dists = np.delete(r_ct_dists, ct_min1_idx, axis=0)
                    r_ct_dists = np.delete(r_ct_dists, ct_min2_idx, axis=1)
                dist = np.mean(dists)
            elif linkage == 'homolog_mnn':
                # Pair only mutual nearest neighbours; unmatched cell types
                # are counted into num_ct_diff instead.
                dists = []
                # Nearest neighbors for the cell types from region 1
                r1_ct_nn = np.argmin(ct_dists, axis=1)
                # Nearest neighbors for the cell types from region 2
                r2_ct_nn = np.argmin(ct_dists, axis=0)
                # Only append distance if we find a mutual nearest neighbor
                for i in range(r1_ct_nn.shape[0]):
                    if r2_ct_nn[r1_ct_nn[i]] == i:
                        dists.append(r_ct_dists[i, r1_ct_nn[i]])
                num_ct_diff = lhs.num_cell_types + rhs.num_cell_types - (2 * len(dists))
                dist = np.mean(dists)
            else:  # default to 'average':
                dist = r_ct_dists.mean()
        else:
            # Both regions carry a region-level transcriptome: compare directly.
            dist = Region._pairwise_diff(lhs.transcriptome, rhs.transcriptome, affinity, linkage)
        return dist, num_ct_diff

    def __repr__(self):
        return f'{self.id_num}{list(self.cell_types.values())}'
@functools.total_ordering
@dataclass
class Edge:
    """Candidate merge between two nodes, ordered purely by its distance.

    Comparisons look only at ``dist`` so that edges sort correctly inside a
    priority queue; the endpoints never participate in the ordering
    (``functools.total_ordering`` derives the remaining comparison operators).
    """

    dist: float
    endpt1: Union[CellType, Region]
    endpt2: Union[CellType, Region]

    def __eq__(self, other):
        return self.dist == other.dist

    def __lt__(self, other):
        return self.dist < other.dist

    def __gt__(self, other):
        return other.dist < self.dist
from typing import List, Callable, Tuple, Optional, Dict, Union
from queue import PriorityQueue
from data.data_loader import DataLoader
from itertools import combinations, product
from data.data_types import Region, CellType, Edge, Mergeable, LINKAGE_CELL_OPTIONS, LINKAGE_REGION_OPTIONS
from tqdm import tqdm
from matplotlib import cm
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import functools
# Need for 3D plotting, even though not used directly. Python is dumb
# noinspection PyUnresolvedReferences
from mpl_toolkits.mplot3d import Axes3D
# Tree quality metrics: minimum evolution, balanced minimum evolution, maximum parsimony.
TREE_SCORE_OPTIONS = ['ME', 'BME', 'MP']
@functools.total_ordering
class Agglomerate3D:
    def __init__(self,
                 linkage_cell: str,
                 linkage_region: str,
                 cell_type_affinity: Callable,
                 region_affinity: Optional[Callable] = None,
                 max_region_diff: Optional[int] = 0,
                 region_dist_scale: Optional[float] = 1,
                 verbose: Optional[bool] = False,
                 pbar: Optional[bool] = False,
                 integrity_check: Optional[bool] = True):
        """Configure an agglomeration run.

        :param linkage_cell: linkage for cell-type merges; one of LINKAGE_CELL_OPTIONS
        :param linkage_region: linkage for region merges; one of LINKAGE_REGION_OPTIONS
        :param cell_type_affinity: distance function between two transcriptomes
        :param region_affinity: distance function used at the region level
        :param max_region_diff: max allowed cell-type-count difference for a region merge
        :param region_dist_scale: multiplier applied to region distances
        :param verbose: print a trace of every merge
        :param pbar: show a tqdm progress bar
        :param integrity_check: validate internal bookkeeping during the run
        """
        self.linkage_cell: str = linkage_cell
        self.linkage_region: str = linkage_region
        self.cell_type_affinity: Callable = cell_type_affinity
        self.region_affinity: Callable = region_affinity
        self.max_region_diff: int = max_region_diff
        self.region_dist_scale: float = region_dist_scale
        self.verbose: bool = verbose
        self.integrity_check: bool = integrity_check
        # One dict per merge event; converted lazily to a DataFrame by `linkage_mat`.
        self.linkage_history: List[Dict[str, int]] = []
        self._linkage_mat: pd.DataFrame = pd.DataFrame()
        # Live bookkeeping: current regions and cell types, keyed by ID.
        self.regions: Dict[int, Region] = {}
        self.cell_types: Dict[int, CellType] = {}
        # The untouched original cell types (leaves of the final tree).
        self.orig_cell_types: Dict[int, CellType] = {}
        # Monotonically increasing ID counters for newly created cell types / regions.
        self._ct_id_idx: int = 0
        self._r_id_idx: int = 0
        self.ct_names: List[str] = []
        self.r_names: List[str] = []
        # Gene masks for cell-type-level and region-level comparisons.
        self._ct_axis_mask = None
        self._r_axis_mask = None
        self._pbar = tqdm() if pbar else None
        # NOTE(review): raising UserWarning (a Warning subclass) is unusual --
        # ValueError would be conventional; kept for backward compatibility.
        if linkage_cell not in LINKAGE_CELL_OPTIONS:
            raise UserWarning(f'Incorrect argument passed in for cell linkage. Must be one of {LINKAGE_CELL_OPTIONS}')
        if linkage_region not in LINKAGE_REGION_OPTIONS:
            raise UserWarning(f'Incorrect argument passed in for region linkage. Must be one of '
                              f'{LINKAGE_REGION_OPTIONS}')
def __repr__(self):
return f'Agglomerate3D<cell_type_affinity={self.cell_type_affinity}, ' \
f'linkage_cell={self.linkage_cell}, ' \
f'linkage_region={self.linkage_region}, ' \
f'max_region_diff={self.max_region_diff}, ' \
f'region_dist_scale={self.region_dist_scale}>'
def __eq__(self, other):
return len(self.linkage_mat.index) == len(other.linkage_mat.index)
def __lt__(self, other):
return len(self.linkage_mat.index) < len(other.linkage_mat.index)
@property
def linkage_mat(self) -> pd.DataFrame:
if self._linkage_mat.empty:
return pd.DataFrame(self.linkage_history)
return self._linkage_mat
def view_tree3d(self):
lm = self.linkage_mat
segments = []
colors = []
num_regions = lm['In region'].max() + 1
colormap = cm.get_cmap('hsv')(np.linspace(0, 1, num_regions))
fig = plt.figure()
ax = fig.gca(projection='3d')
def find_ct_index_region(ct_id: int, index: int) -> Tuple[Union[int, None], Union[int, None]]:
if np.isnan(ct_id):
return np.nan, np.nan
# Cutoff at where we currently are so we don't repeat rows
lm_bound = lm.loc[:index - 1]
no_reg = lm_bound[~lm_bound['Is region']]
ct_row = no_reg[no_reg['New ID'] == ct_id]
# one of the original cell types and at the end of a branch
if ct_row.empty:
return None, self.orig_cell_types[ct_id].region
# not one of the original ones, so have to check which region it's in
return ct_row.index[-1], ct_row['In region'].iat[-1]
def segment_builder(level: int, index: int, root_pos: List[int]):
offset = 2 ** (level - 1) # subtract 1 to divide by 2 since it's only half the line
# figure out where to recurse on
ct1_id, ct2_id = lm.loc[index, ['ID1', 'ID2']]
ct1_index, ct1_region = find_ct_index_region(ct1_id, index)
ct2_index, ct2_region = find_ct_index_region(ct2_id, index)
if lm.loc[index, 'In reg merge']:
# We're drawing on the y-axis
split_axis = 1
# Find the region we're merging in
region = lm.loc[index, 'In region']
region_mat = lm[lm['In region'] == region]
dist = region_mat[region_mat['Is region']]['Distance'].iat[0]
else:
# We're drawing on the x-axis
split_axis = 0
dist = lm.loc[index, 'Distance']
# To have the correct order of recursion so region splits match up
# Also the case in which a cell type is just transferred between regions
if (np.isnan(ct2_region)) or (ct1_region < ct2_region):
l_index = None if ct1_index == index else ct1_index
l_id = ct1_id
l_region = ct1_region
r_index = ct2_index
r_id = ct2_id
r_region = ct2_region
else:
l_index = ct2_index
l_id = ct2_id
l_region = ct2_region
r_index = ct1_index
r_id = ct1_id
r_region = ct1_region
# horizontal x/y-axis bar
# Start is the left side
h_start = root_pos.copy()
h_start[split_axis] -= offset
# end is the right side
h_end = root_pos.copy()
h_end[split_axis] += offset
segments.append([h_start, root_pos])
colors.append(colormap[l_region])
# Don't do if just transferring one cell type to another region
if ~np.isnan(r_region):
segments.append([root_pos, h_end])
colors.append(colormap[r_region])
# vertical z-axis bars
v_left_end = h_start.copy()
v_left_end[2] -= dist
v_right_end = h_end.copy()
v_right_end[2] -= dist
segments.append([h_start, v_left_end])
colors.append(colormap[l_region])
# Don't do if just transferring one cell type to another region
if ~np.isnan(r_region):
segments.append([h_end, v_right_end])
colors.append(colormap[r_region])
# don't recurse if at leaf, but do label
if l_index is None:
label = self.ct_names[int(l_id)]
ax.text(*v_left_end, label, 'z')
else:
segment_builder(level - 1, l_index, v_left_end)
# Don't do if just transferring one cell type to another region
if ~np.isnan(r_region):
if r_index is None:
label = self.ct_names[int(r_id)]
ax.text(*v_right_end, label, 'z')
else:
segment_builder(level - 1, r_index, v_right_end)
# Create root pos z-pos as max of sum of region and ct distances
top_root_pos = [0, 0, lm['Distance'].sum()]
top_level = len(lm.index) - 1
# Should only happen if our tree starts with a region merger, which must consist of two cell types
if lm.loc[top_level, 'Is region']:
segment_builder(top_level, top_level - 1, top_root_pos)
else:
segment_builder(top_level, top_level, top_root_pos)
segments = np.array(segments)
x = segments[:, :, 0].flatten()
y = segments[:, :, 1].flatten()
z = segments[:, :, 2].flatten()
ax.set_zlim(z.min(), z.max())
ax.set_xlim(x.min(), x.max())
ax.set_ylim(y.min(), y.max())
ax.set_xlabel('Cell type', fontsize=20)
ax.set_ylabel('Region', fontsize=20)
ax.set_zlabel('Distance', fontsize=20)
ax.set(xticklabels=[], yticklabels=[])
for line, color in zip(segments, colors):
ax.plot(line[:, 0], line[:, 1], line[:, 2], color=color, lw=2)
plt.show()
def _assert_integrity(self):
# Make sure all cell types belong to their corresponding region
for ct_id in self.cell_types:
assert self.cell_types[ct_id].id_num == ct_id, 'Cell type dict key-value mismatch'
assert ct_id in self.regions[self.cell_types[ct_id].region].cell_types, 'Cell type not in indicated region.'
for r in self.regions.values():
for ct_id in r.cell_types:
assert r.cell_types[ct_id].id_num == ct_id, 'Within region cell type dict key-value mismatch'
assert ct_id in self.cell_types, 'Region has cell type that does not exist recorded cell types.'
    def _trace_all_root_leaf_paths(self) -> Dict[int, List[int]]:
        """Return, for every original cell type, the path of node IDs from the
        tree root down to that leaf (root first, leaf last)."""
        assert not self.linkage_mat.empty, 'Tried tracing empty tree.'
        paths: Dict[int, List[int]] = {}
        # Get only cell types
        lm = self.linkage_mat[~self.linkage_mat['Is region']]
        # Reduce to only ID numbers in numpy
        lm = lm[['ID1', 'ID2', 'New ID']].to_numpy()
        # Aliases for numpy indices
        ids = [0, 1]
        new_id = 2

        def dfs(row: np.ndarray, path: List[int]):
            # Depth-first walk; `row` holds (ID1, ID2, New ID) of one merge.
            for id_idx in ids:
                # If there's a child on the side we're looking at
                if ~np.isnan(row[id_idx]):
                    # Is it a leaf node
                    if row[id_idx] in self.orig_cell_types:
                        path.append(row[id_idx])
                        paths[row[id_idx]] = path.copy()
                        path.pop()
                    else:
                        # choose
                        path.append(row[id_idx])
                        # explore
                        dfs(lm[lm[:, new_id] == row[id_idx]].squeeze(), path)
                        # un-choose
                        path.pop()

        # The final linkage row is the root merge: start the walk there.
        dfs(lm[-1], [lm[-1, new_id]])
        return paths
def _compute_orig_ct_path_dists(self):
num_ct = len(self.orig_cell_types)
dists = np.zeros((num_ct, num_ct))
paths = self._trace_all_root_leaf_paths()
for ct1_idx, ct2_idx in product(range(num_ct), range(num_ct)):
ct1_path = paths[ct1_idx][::-1]
ct2_path = paths[ct2_idx][::-1]
while (len(ct1_path) > 0) and (len(ct2_path) > 0) and (ct1_path[-1] == ct2_path[-1]):
ct1_path.pop()
ct2_path.pop()
dists[ct1_idx, ct2_idx] = len(ct1_path) + len(ct2_path)
return dists
def _compute_orig_ct_linkage_dists(self):
num_ct = len(self.orig_cell_types)
dists = np.zeros((num_ct, num_ct))
for ct1_idx, ct2_idx in product(range(num_ct), range(num_ct)):
dists[ct1_idx, ct2_idx] = CellType.diff(self.orig_cell_types[ct1_idx], self.orig_cell_types[ct2_idx],
affinity=self.cell_type_affinity, linkage=self.linkage_cell,
mask=self._ct_axis_mask)
return dists
def _compute_bme_score(self) -> float:
path_dists = self._compute_orig_ct_path_dists()
linkage_dists = self._compute_orig_ct_linkage_dists()
normalized_dists = linkage_dists / (2 ** path_dists)
return normalized_dists.sum()
def _compute_me_score(self) -> float:
# Get only the rows that make sense to sum
to_sum = self.linkage_mat.loc[self.linkage_mat['Is region'] == self.linkage_mat['In reg merge']]
return to_sum['Distance'].to_numpy().sum()
def _compute_mp_score(self) -> float:
to_sum = self.linkage_mat.loc[self.linkage_mat['Is region'] == self.linkage_mat['In reg merge']]
return to_sum.shape[0]
def compute_tree_score(self, metric: str):
if metric not in TREE_SCORE_OPTIONS:
raise ValueError(f'metric must be one of: {TREE_SCORE_OPTIONS}.')
if metric == 'ME':
return self._compute_me_score()
elif metric == 'MP':
return self._compute_mp_score()
elif metric == 'BME':
return self._compute_bme_score()
    def _merge_cell_types(self, ct1: CellType, ct2: CellType, ct_dist: float, region_id: Optional[int] = None):
        """Merge two cell types, record the linkage, and drop the originals.

        :param ct1: first cell type (removed from all dicts afterwards)
        :param ct2: second cell type (removed from all dicts afterwards)
        :param ct_dist: distance at which the merge occurred
        :param region_id: region receiving the merged type; defaults to the shared region
        :return: ID of the newly created cell type
        """
        # Create new cell type and assign to region
        new_ct = CellType.merge(ct1, ct2, self._ct_id_idx, region_id)
        self.cell_types[self._ct_id_idx] = new_ct
        self.regions[new_ct.region].cell_types[new_ct.id_num] = new_ct
        self._record_link(ct1, ct2, self.cell_types[self._ct_id_idx], ct_dist)
        # remove the old ones
        self.cell_types.pop(ct1.id_num)
        self.cell_types.pop(ct2.id_num)
        self.regions[ct1.region].cell_types.pop(ct1.id_num)
        self.regions[ct2.region].cell_types.pop(ct2.id_num)
        if self.verbose:
            print(f'Merged cell types {ct1} and {ct2} with distance {ct_dist} '
                  f'to form cell type {self.cell_types[self._ct_id_idx]} with {ct1.num_original + ct2.num_original} '
                  f'original data points.\n'
                  f'New cell type dict: {self.cell_types}\n'
                  f'New region dict: {self.regions}\n')
        # increment cell type counter
        self._ct_id_idx += 1
        # return id of newly created cell type
        return self._ct_id_idx - 1  # yeah, this is ugly b/c python doesn't have ++_ct_id_idx
    def _merge_regions(self, r1, r2, r_dist):
        """Merge two regions: pair up and merge their cell types, move any
        leftover cell types into the new region, record the linkage, and
        delete the original regions.

        :param r1: first region (deleted afterwards)
        :param r2: second region (deleted afterwards)
        :param r_dist: distance at which the regions merged
        :return: ID of the newly created region
        """
        r1_ct_list = list(r1.cell_types.values())
        r2_ct_list = list(r2.cell_types.values())
        if self.verbose:
            print(f'Merging regions {r1} and {r2} into new region {self._r_id_idx}\n{{')
        # create new region
        self.regions[self._r_id_idx] = Region(self._r_id_idx)
        # Pairwise cell-type distances between the two regions.
        pairwise_r_ct_dists = np.zeros((len(r1.cell_types), len(r2.cell_types)))
        for r1_ct_idx, r2_ct_idx in product(range(len(r1_ct_list)), range(len(r2_ct_list))):
            pairwise_r_ct_dists[r1_ct_idx, r2_ct_idx] = CellType.diff(r1_ct_list[r1_ct_idx], r2_ct_list[r2_ct_idx],
                                                                      affinity=self.cell_type_affinity,
                                                                      linkage=self.linkage_cell,
                                                                      mask=self._ct_axis_mask)
        # Find the cell types that have to be merged between the two regions
        cts_merge: List[Tuple[CellType, CellType]] = []
        dists: List[float] = []
        if self.linkage_region == 'homolog_mnn':
            # Nearest neighbors for the cell types from region 1
            r1_ct_nn = np.argmin(pairwise_r_ct_dists, axis=1)
            # Nearest neighbors for the cell types from region 2
            r2_ct_nn = np.argmin(pairwise_r_ct_dists, axis=0)
            # Only append distance if we find a mutual nearest neighbor
            for i in range(r1_ct_nn.shape[0]):
                if r2_ct_nn[r1_ct_nn[i]] == i:
                    dists.append(pairwise_r_ct_dists[i, r1_ct_nn[i]])
                    cts_merge.append((r1_ct_list[i], r2_ct_list[r1_ct_nn[i]]))
        # otherwise just do a greedy pairing
        else:
            while np.prod(pairwise_r_ct_dists.shape) != 0:
                ct_merge1_idx, ct_merge2_idx = np.unravel_index(np.argmin(pairwise_r_ct_dists),
                                                                pairwise_r_ct_dists.shape)
                # Append distance to dists and indices to index list
                # noinspection PyArgumentList
                dists.append(pairwise_r_ct_dists.min())
                cts_merge.append((r1_ct_list[ct_merge1_idx], r2_ct_list[ct_merge2_idx]))
                # remove from the distance matrix
                pairwise_r_ct_dists = np.delete(pairwise_r_ct_dists, ct_merge1_idx, axis=0)
                r1_ct_list.pop(ct_merge1_idx)
                pairwise_r_ct_dists = np.delete(pairwise_r_ct_dists, ct_merge2_idx, axis=1)
                r2_ct_list.pop(ct_merge2_idx)
        assert len(dists) == len(cts_merge), 'Number distances not equal to number of cell type mergers.'
        num_ct_diff = r1.num_cell_types + r2.num_cell_types - (2 * len(cts_merge))
        # Continuously pair up cell types, merge them, add them to the new region, and delete them
        for dist, (ct1, ct2) in zip(dists, cts_merge):
            # create new cell type, delete old ones and remove from their regions
            # noinspection PyArgumentList
            new_ct_id = self._merge_cell_types(ct1, ct2, dist, self._r_id_idx)
            # add to our new region
            self.regions[self._r_id_idx].cell_types[new_ct_id] = self.cell_types[new_ct_id]
        # Should have at least one empty region if not doing mutual nearest neighbors
        if self.linkage_region != 'homolog_mnn':
            assert r1.num_cell_types == 0 or r2.num_cell_types == 0, 'Both regions non-empty after primary merging.'
        # if there is a nonempty region, put the remainder of the cell types in the non-empty region into the new region
        for r_leftover in (r1, r2):
            for ct in r_leftover.cell_types.values():
                # Essentially copy the cell type but into a new region and with a new ID
                new_ct = CellType(self._ct_id_idx, self._r_id_idx, ct.transcriptome)
                self.cell_types[new_ct.id_num] = new_ct
                self.regions[self._r_id_idx].cell_types[new_ct.id_num] = new_ct
                # Delete the old cell type
                self.cell_types.pop(ct.id_num)
                # Record the transfer
                self._record_ct_transfer(ct, new_ct)
                self._ct_id_idx += 1
            r_leftover.cell_types.clear()
        # make sure no cell types are leftover in the regions we're about to delete
        assert r1.num_cell_types == 0 and r2.num_cell_types == 0, 'Tried deleting non-empty regions.'
        self.regions.pop(r1.id_num)
        self.regions.pop(r2.id_num)
        self._record_link(r1, r2, self.regions[self._r_id_idx], r_dist, num_ct_diff)
        if self.verbose:
            print(f'Merged regions {r1} and {r2} with distance {r_dist} to form '
                  f'{self.regions[self._r_id_idx]} with '
                  f'{self.regions[self._r_id_idx].num_original} original data points.'
                  f'\nNew region dict: {self.regions}\n}}\n')
        self._r_id_idx += 1
        return self._r_id_idx - 1
def _record_ct_transfer(self, ct_orig: CellType, ct_new: CellType):
    """Log in the linkage history that *ct_orig* was re-created as *ct_new*
    inside a different region during a region merge."""
    assert ct_orig.region != ct_new.region, 'Tried transferring cell type to the same region'
    # Transfers are recorded as pseudo-merges: no partner (ID2) and no distance.
    record = {
        'Is region': False,
        'ID1': ct_orig.id_num,
        'ID2': None,
        'New ID': ct_new.id_num,
        'Distance': None,
        'Num original': ct_new.num_original,
        'In region': ct_new.region,
        'In reg merge': True,
        'Cell type num diff': None,
    }
    self.linkage_history.append(record)
def _record_link(self, n1: Mergeable, n2: Mergeable, new_node: Mergeable, dist: float,
                 ct_num_diff: Optional[int] = None):
    """Append one merge event to the linkage history.

    Args:
        n1, n2: the two merged nodes; both must be cell types or both regions.
        new_node: the node produced by the merge.
        dist: linkage distance between n1 and n2.
        ct_num_diff: difference in cell-type counts (region merges only).
    """
    # Must be recording the linkage of two things of the same type.
    # Use identity (`is`) rather than `==` when comparing type objects.
    assert type(n1) is type(n2), 'Tried recording linkage of a cell type with a region.'
    if self._pbar is not None:
        self._pbar.update(1)
    # record merger in linkage history; a merge counts as a "region merger"
    # when the nodes are regions, or cell types living in different regions
    region_merger = isinstance(n1, Region) or (n1.region != n2.region)
    self.linkage_history.append({'Is region': isinstance(n1, Region),
                                 'ID1': n1.id_num,
                                 'ID2': n2.id_num,
                                 'New ID': new_node.id_num,
                                 'Distance': dist,
                                 'Num original': new_node.num_original,
                                 'In region': new_node.region,
                                 'In reg merge': region_merger,
                                 'Cell type num diff': ct_num_diff
                                 })
@property
def linkage_mat_readable(self):
    """Copy of ``linkage_mat`` where numeric IDs that refer to original
    cell types / regions are replaced by their human-readable names.
    IDs created during agglomeration (beyond the original names) are
    left as numbers. The underlying matrix is not modified."""
    readable = self.linkage_mat.copy()
    ct_lookup = dict(enumerate(self.ct_names))
    r_lookup = dict(enumerate(self.r_names))
    id_columns = ('ID1', 'ID2', 'New ID')
    for row in readable.index:
        # Row describes either a region merge or a cell-type merge.
        lookup = r_lookup if readable.loc[row, 'Is region'] else ct_lookup
        for column in id_columns:
            value = readable.loc[row, column]
            if value in lookup:
                readable.loc[row, column] = lookup[value]
        region = readable.loc[row, 'In region']
        if region in r_lookup:
            readable.loc[row, 'In region'] = r_lookup[region]
    return readable
def agglomerate(self, data_ct: DataLoader, data_r: Optional[DataLoader] = None) -> pd.DataFrame:
    """Run the full 3D agglomeration and return the linkage matrix.

    Args:
        data_ct: loader yielding cell-type transcriptomes plus, per cell
            type, the name of the region it belongs to.
        data_r: optional loader yielding region-level transcriptomes; when
            None, regions are inferred from the cell-type data alone.

    Returns:
        pd.DataFrame: the recorded linkage history (``self.linkage_mat``).
    """
    self.ct_names = data_ct.get_names()
    ct_regions = data_ct.get_corresponding_region_names()
    # Building initial regions and cell types
    if data_r is None:
        self.r_names = np.unique(ct_regions)
        self.regions = {r: Region(r) for r in range(len(self.r_names))}
        self._ct_axis_mask = data_ct.ct_axis_mask
        self._r_axis_mask = data_ct.r_axis_mask
    else:
        self.r_names = data_r.get_names()
        self.regions = {r: Region(r, _transcriptome=data_r[r]) for r in range(len(self.r_names))}
    # Map each region name to its integer ID so cell types can be binned.
    region_to_id: Dict[str, int] = {self.r_names[i]: i for i in range(len(self.r_names))}
    for c in range(len(data_ct)):
        r_id = region_to_id[ct_regions[c]]
        self.orig_cell_types[c] = CellType(c, r_id, data_ct[c])
        self.regions[r_id].cell_types[c] = self.orig_cell_types[c]
    self.cell_types = self.orig_cell_types.copy()
    # Fresh IDs for merged nodes start right after the original ones.
    self._ct_id_idx = len(self.ct_names)
    self._r_id_idx = len(self.r_names)
    if self._pbar is not None:
        self._pbar.total = len(self.ct_names) + len(self.r_names) - 2
    # repeat until we're left with one region and one cell type
    # not necessarily true evolutionarily, but same assumption as normal dendrogram
    while len(self.regions) > 1 or len(self.cell_types) > 1:
        # NOTE(review): both queues are rebuilt from scratch on every
        # iteration, i.e. O(k^2) distance computations per merge.
        ct_dists: PriorityQueue[Edge] = PriorityQueue()
        r_dists: PriorityQueue[Edge] = PriorityQueue()
        # Compute distances of all possible edges between cell types in the same region
        for region in self.regions.values():
            for ct1, ct2 in combinations(list(region.cell_types.values()), 2):
                dist = CellType.diff(ct1, ct2,
                                     affinity=self.cell_type_affinity,
                                     linkage=self.linkage_cell,
                                     mask=self._ct_axis_mask)
                # add the edge with the desired distance to the priority queue
                ct_dists.put(Edge(dist, ct1, ct2))
        # compute distances between merge-able regions
        for r1, r2 in combinations(self.regions.values(), 2):
            # condition for merging regions
            # regions can only differ by self.max_region_diff number of cell types
            if np.abs(r1.num_cell_types - r2.num_cell_types) > self.max_region_diff:
                continue
            dist, num_ct_diff = Region.diff(r1, r2, affinity=self.region_affinity, linkage=self.linkage_region,
                                            affinity2=self.cell_type_affinity, linkage2=self.linkage_cell,
                                            mask=self._r_axis_mask, mask2=self._ct_axis_mask)
            # If we're using region linkage homolog_mnn, then the number of cell types contained different may go up
            if num_ct_diff > self.max_region_diff:
                continue
            r_dists.put(Edge(dist, r1, r2))
        # Now go on to merge step!
        # Decide whether we're merging cell types or regions
        ct_edge = ct_dists.get() if not ct_dists.empty() else None
        r_edge = r_dists.get() if not r_dists.empty() else None
        # both shouldn't be None
        assert not (ct_edge is None and r_edge is None), 'No cell types or regions to merge.'
        # we're merging cell types, which gets a slight preference if equal
        if ct_edge is not None and ((r_edge is None) or (ct_edge.dist <= r_edge.dist * self.region_dist_scale)):
            ct1 = ct_edge.endpt1
            ct2 = ct_edge.endpt2
            self._merge_cell_types(ct1, ct2, ct_edge.dist)
        # we're merging regions
        elif r_edge is not None:
            # First, we have to match up homologous cell types
            # Just look for closest pairs and match them up
            r1 = r_edge.endpt1
            r2 = r_edge.endpt2
            self._merge_regions(r1, r2, r_edge.dist)
        if self.integrity_check:
            self._assert_integrity()
    if self._pbar is not None:
        self._pbar.close()
    return self.linkage_mat
from typing import Callable, Optional, List, Dict, Iterable, Tuple
from agglomerate.agglomerate_3d import Agglomerate3D, TREE_SCORE_OPTIONS
from itertools import product
from data.data_loader import DataLoader
import multiprocessing as mp
import pandas as pd
import numpy as np
from tqdm import tqdm
class BatchAgglomerate3D:
    """Run :class:`Agglomerate3D` over the cartesian product of hyper-parameter
    choices in parallel, then score and rank the resulting trees.

    Each combination of cell linkage, region linkage, affinities,
    ``max_region_diff`` and ``region_dist_scale`` is agglomerated in a
    separate worker process.
    """

    def __init__(self,
                 linkage_cell: List[str],
                 linkage_region: List[str],
                 cell_type_affinity: List[Callable],
                 region_affinity: Optional[List[Callable]] = None,
                 max_region_diff: Optional[List[int]] = None,
                 region_dist_scale: Optional[Iterable[float]] = None,
                 verbose: Optional[bool] = False,
                 integrity_check: Optional[bool] = True):
        # Can't have mutable types as default :(
        if region_affinity is None:
            region_affinity = [None]
        if region_dist_scale is None:
            region_dist_scale = [1]
        if max_region_diff is None:
            max_region_diff = [0]
        self.linkage_cell = linkage_cell
        self.linkage_region = linkage_region
        self.cell_type_affinity = cell_type_affinity
        self.region_affinity = region_affinity
        self.max_region_diff = max_region_diff
        self.region_dist_scale = region_dist_scale
        self.verbose = verbose
        self.integrity_check = integrity_check
        self.agglomerators: List[Agglomerate3D] = []
        self.augmented_tree_scores: List[Dict[str, float]] = []
        self.tree_scores: Dict[str, List[float]] = {metric: [] for metric in TREE_SCORE_OPTIONS}
        # Progress bar total = number of hyper-parameter combinations.
        # np.prod (np.product was removed in NumPy 2.0).
        self.pbar = \
            tqdm(total=np.prod(list(map(len, [
                linkage_cell, linkage_region, cell_type_affinity, region_affinity, max_region_diff, region_dist_scale
            ]))))

    @staticmethod
    def _agglomerate_func(lc, lr, cta, ra, mrd, rds, ic, data):
        """Worker: build and run one Agglomerate3D for a parameter combination."""
        agglomerate = Agglomerate3D(linkage_cell=lc,
                                    linkage_region=lr,
                                    cell_type_affinity=cta,
                                    region_affinity=ra,
                                    max_region_diff=mrd,
                                    region_dist_scale=rds,
                                    verbose=False,
                                    pbar=False,
                                    integrity_check=ic
                                    )
        agglomerate.agglomerate(data)
        return agglomerate

    def _collect_agglomerators(self, result):
        # apply_async callback: runs in the parent process.
        self.agglomerators.append(result)
        self.pbar.update(1)

    def agglomerate(self, data_ct: DataLoader):
        """Agglomerate *data_ct* once per hyper-parameter combination, in parallel."""
        pool = mp.Pool(mp.cpu_count())
        for lc, lr, cta, ra, mrd, rds in product(self.linkage_cell,
                                                 self.linkage_region,
                                                 self.cell_type_affinity,
                                                 self.region_affinity,
                                                 self.max_region_diff,
                                                 self.region_dist_scale):
            if self.verbose:
                print(f'Starting agglomeration with {lc, lr, cta, ra, mrd, rds, self.integrity_check}')
            pool.apply_async(self._agglomerate_func,
                             args=(lc, lr, cta, ra, mrd, rds, self.integrity_check, data_ct),
                             callback=self._collect_agglomerators)
        pool.close()
        pool.join()
        self.pbar.close()

    def _collect_augmented_scores(self, result):
        # Unpack (params..., per-metric scores) produced by _augmented_score_func.
        lc, lr, mrd, rds, scores = result
        for metric, score in zip(TREE_SCORE_OPTIONS, scores):
            self.augmented_tree_scores.append(
                {'linkage_cell': lc, 'linkage_region': lr, 'max_region_diff': mrd, 'region_dist_scale': rds,
                 'score metric': metric, 'score': score})
        self.pbar.update(1)

    @staticmethod
    def _augmented_score_func(a: Agglomerate3D) -> Tuple[str, str, int, float, List[float]]:
        """Worker: return an agglomerator's parameters plus all its tree scores."""
        return a.linkage_cell, \
               a.linkage_region, \
               a.max_region_diff, \
               a.region_dist_scale, \
               [a.compute_tree_score(m) for m in TREE_SCORE_OPTIONS]

    def get_all_scores(self) -> pd.DataFrame:
        """Score every agglomerator and return a long-format DataFrame of results."""
        self._compute_tree_scores(func=self._augmented_score_func, callback=self._collect_augmented_scores)
        return pd.DataFrame(self.augmented_tree_scores)

    def _compute_tree_scores(self, func: Callable, callback: Callable):
        # Fan the (CPU-bound) scoring out over a process pool.
        self.pbar = tqdm(total=len(self.agglomerators))
        pool = mp.Pool(mp.cpu_count())
        for a in self.agglomerators:
            pool.apply_async(func=func,
                             args=(a,),
                             callback=callback
                             )
        pool.close()
        pool.join()
        self.pbar.close()

    def _collect_basic_scores(self, scores: List[float]):
        for metric, score in zip(TREE_SCORE_OPTIONS, scores):
            self.tree_scores[metric].append(score)
        self.pbar.update(1)

    @staticmethod
    def _basic_score_func(a: Agglomerate3D) -> List[float]:
        """Worker: compute all tree-score metrics for one agglomerator."""
        return [a.compute_tree_score(m) for m in TREE_SCORE_OPTIONS]

    def get_best_agglomerators(self) -> Dict[str, Tuple[float, np.ndarray]]:
        """For each score metric, return (best score, unique agglomerators achieving it)."""
        self._compute_tree_scores(func=self._basic_score_func, callback=self._collect_basic_scores)
        best_agglomerators: Dict[str, Tuple[float, Agglomerate3D]] = {
            metric: (
                np.min(self.tree_scores[metric]),
                np.unique(
                    np.array(self.agglomerators)[np.where(self.tree_scores[metric] == np.min(self.tree_scores[metric]))]
                )
            )
            for metric in TREE_SCORE_OPTIONS
        }
        return best_agglomerators
# 3GPP Bibtex entry generator
[![Build Status](https://travis-ci.org/martisak/3gpp-citations.svg?branch=master)](https://travis-ci.org/martisak/3gpp-citations) ![](https://img.shields.io/github/issues-raw/martisak/3gpp-citations.svg?style=flat) ![](https://img.shields.io/github/license/martisak/3gpp-citations.svg?style=flat) [![Coverage Status](https://coveralls.io/repos/github/martisak/3gpp-citations/badge.svg?branch=master)](https://coveralls.io/github/martisak/3gpp-citations?branch=master) [![Gitter chat](https://badges.gitter.im/martisak/3gpp-citations.png)](https://gitter.im/3gpp-citations/community "Gitter chat") [![](https://img.shields.io/pypi/v/3gpp-citations.svg?style=flat)](https://pypi.org/project/3gpp-citations/) ![](https://img.shields.io/pypi/dd/3gpp-citations.svg?style=flat) ![](https://img.shields.io/pypi/pyversions/3gpp-citations.svg?style=flat) [![HitCount](http://hits.dwyl.io/martisak/3gpp-citations.svg)](http://hits.dwyl.io/martisak/3gpp-citations) ![](https://img.shields.io/codeclimate/maintainability/martisak/3gpp-citations.svg?style=flat)
This project aims to generate [BiBTeX](http://www.bibtex.org/) files that
can be used when citing [3GPP](https://www.3gpp.org) specifications. The input is a document list exported from the [3GPP Portal](https://portal.3gpp.org/).
## Installation
`pip install 3gpp-citations`
To also install test dependencies run
`pip install 3gpp-citations[test]`
## Instructions
1. Go to the [3GPP Portal](https://portal.3gpp.org/#55936-specifications)
2. Generate the list of specifications you want.
3. Download to Excel and save file
4. Run `python 3gpp-citations.py -i exported.xlsx -o 3gpp.bib`
5. Use in LaTeX.
*Optionally* use the provided `3gpp.bib` directly.
## Things to note
* The output `bibtex` class is set to `@techreport`.
* If you add the option `--xelatex`, break-symbols `\-` will be used in url-fields.
* The version and date are fetched from 3gpp.org with one web request per specification, so parsing a long list takes a while. If you know a faster way to obtain them, let me know.
## Example output
~~~
@techreport{3gpp.36.331,
author = {3GPP},
day = {20},
institution = {{3rd Generation Partnership Project (3GPP)}},
month = {04},
note = {Version 14.2.2},
number = {36.331},
title = {{Evolved Universal Terrestrial Radio Access (E-UTRA); Radio Resource Control (RRC); Protocol specification}},
type = {Technical Specification (TS)},
url = {https://portal.3gpp.org/desktopmodules/Specifications/SpecificationDetails.aspx?specificationId=2440},
year = {2017}
}
~~~
## Contribute
See our [contribution guidelines](CONTRIBUTING.md) and our [Code of Conduct](CODE_OF_CONDUCT.md).
## Acknowledgment
This project has been updated as part of the [WASP Software and Cloud Technology](http://wasp-sweden.org/graduate-school/courses/software-and-cloud-technology-spring-2019/) course.
This work was partially supported by the Wallenberg AI, Autonomous Systems and Software Program (WASP) funded by the Knut and Alice Wallenberg Foundation.
| 3gpp-citations | /3gpp-citations-1.1.4.tar.gz/3gpp-citations-1.1.4/README.md | README.md |
from __future__ import print_function
import argparse
from argparse import RawTextHelpFormatter
from datetime import datetime
from openpyxl import load_workbook
from bibtexparser.bwriter import BibTexWriter
from bibtexparser.bibdatabase import BibDatabase
from lxml import html
import requests
from tqdm import tqdm
# Help text shown as the argparse description (--help header).
DESCRIPTION = """
3GPP Bibtex entry generator --- Convert 3GPP document list from .xls to .bib.
1. Go to the [3GPP Portal](https://portal.3gpp.org/#55936-specifications)
2. Generate the list of specifications you want.
3. Download to Excel and save file
4. Run
`python 3gpp-citations.py -i exported.xlsx -o 3gpp.bib`
5. Use in LaTeX.
* The output `bibtex` class is set to `@techreport`.
* The version and date are read from the URL, but it is slow so it takes
a while to parse the list. If you find an easy solution to this, let me know.
"""
# Example bibtex entry appended to the argparse help output (--help footer).
EPILOG = """
Example output:
@Techreport{3gpp.36.331,
author = "3GPP",
title = "{Evolved Universal Terrestrial Radio Access (E-UTRA);
Radio Resource Control (RRC); Protocol specification}",
type = "TS",
institution = "{3rd Generation Partnership Project (3GPP)}",
number = "{36.331}",
days = 11,
month = jul,
year = 2016,
url = "http://www.3gpp.org/dynareport/36331.htm",
}
"""
def parse_excel_row(row):
    """Extract (number, title, doctype) from one worksheet row.

    Column layout of the exported sheet: 0 = spec number, 1 = document
    type, 2 = title.
    """
    return row[0].value, row[2].value, row[1].value
def format_entry(number, title, doctype, url):
    """Build the bibtexparser entry dict for one specification.

    The title and institution are wrapped in braces so BibTeX preserves
    their capitalization.
    """
    entry = {
        'ID': f'3gpp.{number}',
        'ENTRYTYPE': 'techreport',
        'title': f'{{{title}}}',
        'type': doctype,
        'author': '3GPP',
        'institution': '{3rd Generation Partnership Project (3GPP)}',
        'number': number,
        'url': url,
    }
    return entry
def format_url(number, xelatex=True):
    r"""Format the bibtex ``url`` field for a specification number.

    Args:
        number: the specification number, e.g. ``"36.331"``; dots are
            stripped when building the DynaReport URL.
        xelatex: when True, insert LaTeX break markers ``\-`` so long
            URLs can wrap.

    Returns:
        The DynaReport URL as a string.
    """
    # Raw string instead of "\-" -- avoids the invalid-escape warning the
    # old code silenced with a pylint disable.
    breakchar = r"\-" if xelatex else ""
    url = "http://www.3gpp.org/{breakchar}DynaReport/" \
          "{breakchar}{number}.htm".format(
              breakchar=breakchar,
              number=number.replace(".", ""))
    return url
def parse_date(datestr):
    """Split an ISO date string of the form ``1982-06-22`` into
    ``(year, month, day)`` strings (month/day without zero padding)."""
    parsed = datetime.strptime(datestr, '%Y-%m-%d')
    return str(parsed.year), str(parsed.month), str(parsed.day)
def get_bibdatabase():
    """Create and return an empty :class:`BibDatabase` with an
    initialized (empty) entry list."""
    database = BibDatabase()
    database.entries = []
    return database
def get_worksheet(filename):
    """Open the workbook at *filename* with openpyxl and return its
    first worksheet."""
    workbook = load_workbook(filename)
    first_sheet_name = workbook.sheetnames[0]
    return workbook[first_sheet_name]
def get_entry(row, xelatex=True):
    """
    Return an entry from a row in the Excel-sheet.

    Builds the basic bibtex fields from the row, then — when the row links
    to a spec page on 3gpp.org — scrapes that page for the latest version
    number and publication date. Returns None for empty rows.
    """
    number, title, doctype = parse_excel_row(row)
    if number is None:
        return None
    url = format_url(number, xelatex)
    entry = format_entry(number, title, doctype, url)
    # The Excel sheet does not contain version or release.
    if row[0].hyperlink is not None:
        # entry['url'] = row[0].hyperlink.target
        page = requests.get(row[0].hyperlink.target)
        tree = html.fromstring(page.content)
        # It is enough to go through the first two (latest two) releases.
        # NOTE(review): the XPath ids below are tied to the exact markup of
        # the 3GPP portal page and will break silently if it changes.
        for release in range(2):
            release_row = tree.xpath(
                ('//tr[@id="SpecificationReleaseControl1_rpbReleases_i{}'
                 '_ctl00_specificationsVersionGrid_ctl00__0"]/td/div/a')
                .format(release))
            if release_row:
                daterow = tree.xpath(
                    ('//tr[@id="SpecificationReleaseControl1_rpbReleases'
                     '_i{}_ctl00_specificationsVersionGrid_ctl00__0"]/td')
                    .format(release))
                entry['note'] = "Version {}".format(
                    release_row[1].text.strip())
                datestr = daterow[2].text.strip()
                # The date cell may be empty for not-yet-published versions.
                if datestr:
                    entry['year'], entry['month'], entry['day'] = \
                        parse_date(datestr)
                break
    return entry
def write_bibtex(bib_database, filename=None):
    """Serialize *bib_database* to *filename*, or to stdout when omitted.

    Args:
        bib_database: the BibDatabase to serialize.
        filename: target path; None prints to stdout instead.
    """
    writer = BibTexWriter()
    if filename is not None:
        # Explicit UTF-8 so non-ASCII characters in titles/notes survive
        # regardless of the platform's default encoding.
        with open(filename, 'w', encoding='utf-8') as bibfile:
            bibfile.write(writer.write(bib_database))
    else:
        print(writer.write(bib_database))
def main(args):
    """
    The main function that does all the heavy lifting.

    Reads the exported Excel sheet named by ``args.input``, builds one
    bibtex entry per row, and writes the result to ``args.output``
    (stdout when omitted).
    """
    bib_database = get_bibdatabase()
    worksheet = get_worksheet(args.input)
    # Iterate over the rows in the Excel-sheet but skip the header.
    # NOTE(review): `row_offset` was removed from openpyxl's iter_rows in
    # favour of `min_row=2` -- confirm the pinned openpyxl version still
    # accepts this keyword.
    for row in tqdm(
            worksheet.iter_rows(row_offset=1),
            total=worksheet.max_row - 1):
        entry = get_entry(row, args.xelatex)
        if entry is not None:
            bib_database.entries.append(entry)
    write_bibtex(bib_database, args.output)
def parse_args(args):
    """Build the command-line parser and parse *args*.

    Args:
        args: list of command-line tokens (e.g. ``sys.argv[1:]``).

    Returns:
        argparse.Namespace with ``input``, ``output`` and ``xelatex``.
    """
    arg_parser = argparse.ArgumentParser(
        description=DESCRIPTION,
        epilog=EPILOG,
        formatter_class=RawTextHelpFormatter)
    arg_parser.add_argument('--input', '-i', metavar='INPUT',
                            required=True,
                            help=('The Excel file generated by and '
                                  'exported from the 3GPP Portal '
                                  '(https://portal.3gpp.org)'))
    arg_parser.add_argument('--output', '-o', metavar='OUTPUT',
                            help=('The bib file to write to. '
                                  'STDOUT is used if omitted.'))
    arg_parser.add_argument('--xelatex',
                            action='store_true',
                            help='Use line breaks')
    return arg_parser.parse_args(args)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """
    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction (n - 1) for a sample, plain n for a population.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        mu = self.mean
        sigma = self.stdev
        min_range = min(self.data)
        max_range = max(self.data)
        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces
        x = []
        y = []
        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))
        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # Bug fix: this labelled axes[0] a second time; the pdf subplot is axes[1].
        axes[1].set_ylabel('Density')
        plt.show()
        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances (not stdevs) add for independent Gaussians.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
    """ Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats to be extracted from the data file
        p (float) representing the probability of an event occurring
        n (int) number of trials
    """
    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Function to calculate the mean from p and n

        Args:
            None

        Returns:
            float: mean of the data set
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Function to calculate the standard deviation from p and n.

        Args:
            None

        Returns:
            float: standard deviation of the data set
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Function to calculate p and n from the data set

        Args:
            None

        Returns:
            float: the p value
            float: the n value
        """
        # p is estimated as the fraction of successes (1s) in the data.
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()

    def plot_bar(self):
        """Function to output a bar chart of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.bar(x=['0', '1'], height=[(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability density function calculator for the binomial distribution.

        Args:
            k (int): number of successes for calculating the probability mass

        Returns:
            float: probability density function output
        """
        a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Function to plot the pdf of the binomial distribution

        Args:
            None

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        x = []
        y = []
        # calculate the x values to visualize
        for i in range(self.n + 1):
            x.append(i)
            y.append(self.pdf(i))
        # make the plots
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()
        return x, y

    def __add__(self, other):
        """Function to add together two Binomial distributions with equal p

        Args:
            other (Binomial): Binomial instance

        Returns:
            Binomial: Binomial distribution

        Raises:
            AssertionError: if the two distributions have different p values
        """
        # The old try/except around this assert only re-raised the same
        # exception (and left an unused variable), so it was removed.
        assert self.p == other.p, 'p values are not equal'
        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Function to output the characteristics of the Binomial instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}, p {}, n {}".\
            format(self.mean, self.stdev, self.p, self.n)
import re
from typing import Tuple, List
from .utils import get_mapped_commands
from .errors import TranslationMissing
from .command import Command, cmd_from_info
class Threepio(object):
    """Translate framework commands from one language/framework to another
    using the pre-built command mapping returned by ``get_mapped_commands``."""

    def __init__(self, from_lang: str, to_lang: str, framework: object):
        self.commands = get_mapped_commands()
        self.from_lang = from_lang
        self.to_lang = to_lang
        self.framework = framework

    def _normalize_func_name(self, name: str, lang: str) -> str:
        """Lowercase *name* and strip every non-letter so lookups are
        punctuation- and case-insensitive."""
        alpha = re.compile("[^a-zA-Z]")
        return alpha.sub("", name).lower()

    def _order_args(
        self, cmd: Command, from_info: dict, to_info: dict
    ) -> Tuple[list, dict]:
        """Reorder *cmd*'s positional args and kwargs from the source
        framework's parameter order into the target framework's order.

        Args/kwargs with no mapped position in the target keep their
        original form.
        """
        new_args = []
        new_kwargs = {}
        for i, arg in enumerate(cmd.args):
            from_arg = from_info["args"][i]
            to_arg_index = next(
                (
                    index
                    for index, d in enumerate(to_info["args"])
                    if d["name"] == from_arg.get(self.to_lang, None)
                ),
                None,
            )
            if to_arg_index is None:
                # No mapped slot in the target signature; keep as-is.
                new_args.append(arg)
                continue
            new_args.insert(to_arg_index, arg)
        # If any kwargs are normal args, splice them in as well
        for k, v in cmd.kwargs.items():
            from_arg = [a for a in from_info["args"] if a["name"] == k][0]
            to_arg_index = next(
                (
                    index
                    for index, d in enumerate(to_info["args"])
                    if d["name"] == from_arg.get(self.to_lang, {})
                ),
                None,
            )
            if to_arg_index is None:
                new_kwargs[k] = v
                continue
            new_args.insert(to_arg_index, v)
        return new_args, new_kwargs

    def translate_multi(self, orig_cmd, commands_info):
        """Translate a source command that maps to a sequence of target
        commands; returns the list of translated command configs."""
        cmd_config = commands_info.pop(0)
        store = {}
        # Bind the original arguments by name so later commands in the
        # sequence can reference them.
        for i, arg in enumerate(orig_cmd.args):
            cmd_config["args"][i]["value"] = arg
            store[cmd_config["args"][i]["name"]] = arg
        new_cmds = [cmd_config]
        for from_info in commands_info:
            cmd = cmd_from_info(from_info, store)
            to_info = self.commands[self.to_lang][
                self._normalize_func_name(from_info.get(self.to_lang), self.to_lang)
            ][0]
            new_cmds.append(self.translate_command(cmd, from_info, to_info))
        return new_cmds

    def translate_command(self, cmd, from_command, to_command):
        """Build the target-framework Command equivalent to *cmd*."""
        args, kwargs = self._order_args(cmd, from_command, to_command)
        output = from_command.get("placeholder_output", None)
        return Command(
            to_command["name"],
            args,
            kwargs,
            attrs=to_command["attrs"],
            placeholder_output=output,
            # No concrete implementation is attached at translation time.
            exec_fn=None,
        )

    def translate(self, cmd: Command, lookup_command: bool = False) -> List[Command]:
        """Translate *cmd* from ``from_lang`` to ``to_lang``.

        Returns a list of Commands (a single-element list for one-to-one
        translations). Raises TranslationMissing when no mapping exists.
        """
        from_info = self.commands[self.from_lang][
            self._normalize_func_name(cmd.function_name, self.from_lang)
        ]
        if len(from_info) > 1:
            # One source command maps to several target commands.
            return self.translate_multi(cmd, from_info)
        from_info = from_info[0]
        if from_info.get(self.to_lang, None) is None:
            raise TranslationMissing(cmd.function_name)
        to_info = self.commands[self.to_lang][
            self._normalize_func_name(from_info.get(self.to_lang), self.to_lang)
        ]
        return [self.translate_command(cmd, from_info, to_info[0])]
from typing import Union, Dict
import requests
from requests.auth import HTTPBasicAuth
class GitHub:
    """Minimal GitHub REST v3 client covering the calls qqq needs."""

    _api_base_url = 'https://api.github.com'

    def __init__(self, username, token):
        self.username = username
        self.token = token

    @classmethod
    def verify_token(cls, username: str, token: str) -> bool:
        """
        Verify a GitHub personal access token.

        :param username: The GitHub user associated with the token
        :param token: The personal access token
        :return: True when the token authenticates the user
        """
        response = requests.get(
            'https://api.github.com/user',
            auth=HTTPBasicAuth(username, token))
        return response.status_code == 200

    def get_user(self, username: str) -> Union[Dict, None]:
        """
        Get a GitHub user.

        :param username: The user to get from GitHub.
        :return: JSON response from GitHub API if the user exists
        """
        response = requests.get(f'{self._api_base_url}/users/{username}')
        if response.status_code != 200:
            return None
        return response.json()

    def create_repo(self, name: str) -> Union[Dict, None]:
        """
        Create a private repo on GitHub.

        :param name: The name of the repo
        :return: JSON response from GitHub API if the request was successful
        """
        payload = {'name': name, 'private': True}
        response = requests.post(
            f'{self._api_base_url}/user/repos',
            json=payload,
            auth=HTTPBasicAuth(self.username, self.token))
        if response.status_code != 201:
            return None
        return response.json()

    def add_collaborator(self, repo_name: str, username: str, admin: bool = False) -> bool:
        """
        Add a collaborator to a GitHub repo.

        :param repo_name: The name of the repo on GitHub
        :param username: The username of the collaborator
        :param admin: Whether or not the collaborator should have admin privileges
        :return: True if the request was successful
        """
        target = (f'{self._api_base_url}/repos/{self.username}/'
                  f'{repo_name}/collaborators/{username}')
        permission = {'permission': 'admin'} if admin else None
        response = requests.put(
            target,
            auth=HTTPBasicAuth(self.username, self.token),
            json=permission)
        return response.status_code in (201, 204)
import configparser
import os
import secrets
from pathlib import Path
import click
import click_spinner
import shortuuid
from git import Repo
from git.exc import InvalidGitRepositoryError
from .github import GitHub
# Name of the INI file stored inside the click application directory.
CONFIG_FILE = 'config.ini'
# Application name, used as the click app-dir key.
QQQ = 'qqq'
@click.group()
def cli():
    """
    QQQ allows you to easily share your currently checked-out git branch with
    other people via GitHub.

    How to use QQQ:\n
    1. Obtain a personal access token from GitHub with the full `repo` permission.\n
    2. Use `qqq login` to save your GitHub access token to the QQQ config file.\n
    3. `cd` to your local git repository and run `qqq send` to share the currently
    checked-out branch with other GitHub users.
    """
    # Group entry point only -- the subcommands (`login`, `send`) do the work.
    # NOTE: the docstring above is click's --help text; do not edit casually.
    pass
@cli.command()
@click.option('-u', '--user', 'user', help='Your GitHub username.')
@click.option('-t', '--token', 'token', help='Your GitHub personal access token.')
def login(user, token):
    """Save your GitHub access token."""
    app_dir = click.get_app_dir(QQQ)
    config_path = f'{app_dir}/{CONFIG_FILE}'

    # Verify user against the GitHub API before persisting anything.
    with click_spinner.spinner():
        if not GitHub.verify_token(user, token):
            click.echo(click.style('Invalid GitHub username or token!', fg='red'))
            raise click.Abort

    # Check if file already exists
    if Path(config_path).is_file():
        # File exists, prompt to overwrite
        click.confirm(f'{click.format_filename(config_path)} already exists, update?', abort=True)

    # Create config object
    # NOTE(review): the token is stored in plain text in the config file.
    cp = configparser.ConfigParser()
    cp['auth'] = {
        'user': user,
        'token': token
    }

    # Make sure the qqq dir exists
    if not Path(app_dir).is_dir():
        click.echo(f'Creating directory {click.format_filename(app_dir)}...')
        Path(app_dir).mkdir(parents=True, exist_ok=True)

    # Write to config file
    with open(config_path, 'w') as config_file:
        cp.write(config_file)
    click.echo(f'Updated config file located at:\t{click.format_filename(config_path)}')
@cli.command()
@click.argument('github_username')
@click.option('-a', '--admins', multiple=True, required=False, help='GitHub users to invite as admin collaborators.')
def send(github_username, admins):
    """Share your local branch with other GitHub users."""
    config_path = f'{click.get_app_dir(QQQ)}/{CONFIG_FILE}'
    # Create the repo object from the current working directory
    try:
        repo = Repo(os.getcwd())
    except InvalidGitRepositoryError:
        click.echo(click.style('Please use qqq from within a valid git repository.', fg='red'))
        raise click.Abort
    if repo.bare:
        # Confirm the user wants to use an empty repo
        click.confirm('Repository appears to be bare, continue?', abort=True)
    # Make sure config file exists
    if not Path(config_path).is_file():
        click.echo(click.style('Config files does not exist. Run `qqq login`.', fg='red'))
        raise click.Abort
    # Read the saved GitHub credentials
    cp = configparser.ConfigParser()
    try:
        cp.read(config_path)
        auth_user = cp.get('auth', 'user')
        auth_token = cp.get('auth', 'token')
    except configparser.Error:
        click.echo(click.style('Malformed configuration file.', fg='red'))
        raise click.Abort
    gh = GitHub(auth_user, auth_token)
    # Verify user exists on GitHub
    user = gh.get_user(github_username)
    if user is None:
        click.echo(f'Could not find GitHub user {github_username}.')
        raise click.Abort
    # Generate a unique name for the new private repository
    repo_name = f'{github_username}-{shortuuid.uuid()}'
    # Ask user for branch name
    branch_name = click.prompt('Enter the branch name on the remote repository', default='master')
    # Confirm with user
    click.echo(f'Preparing to send the current branch to {github_username}...')
    _msg = f'''Are you sure you want to send the current branch to {user["login"]} ({user["name"]})? This will:
\t1. Take the current `{repo.active_branch}` branch and force push to {auth_user}/{repo_name} on GitHub (private)
\t2. Invite {github_username} as a collaborator\n'''
    if admins:
        _msg += f'\t3. Invite {", ".join([str(a) for a in admins])} as {"an " if len(admins) == 1 else ""}' \
                f'admin collaborator{"s" if len(admins) > 1 else ""}\n'
    click.confirm(click.style(_msg, fg='cyan'), abort=True)
    click.echo(f'Creating repo on GitHub and inviting {user["login"]}...')
    with click_spinner.spinner():
        # Create repo on GitHub
        new_repo = gh.create_repo(repo_name)
        if new_repo is None:
            click.echo(click.style('Failed to create repository on GitHub.', fg='red'))
            raise click.Abort
        # Push the current branch to the new repo via a temporary remote.
        # The access token is embedded in the remote URL, so the remote is
        # deleted again right after the push.
        _tmp_remote_name = secrets.token_urlsafe()
        _tmp_remote_url = f'https://{auth_token}:x-oauth-basic@github.com/{auth_user}/{repo_name}.git'
        new_remote = repo.create_remote(_tmp_remote_name, _tmp_remote_url)
        new_remote.push(f'{repo.head.ref}:{branch_name}')
        repo.delete_remote(_tmp_remote_name)
        if not gh.add_collaborator(repo_name, user["login"]):
            click.echo(click.style(f'Error inviting {user["login"]}.', fg='red'))
    # Invite admin collaborators
    for admin_username in admins:
        au = gh.get_user(admin_username)  # Verify the admin collaborator's GitHub account
        if au:
            # Bug fix: the confirmation result used to be ignored, so the
            # admin was invited even when the user answered "no". Skip instead.
            if not click.confirm(click.style(f'Are you sure you want to invite {au["login"]} as an admin?', fg='cyan')):
                continue
            click.echo(f'Inviting admin {au["login"]} ({au["name"]})...')
            with click_spinner.spinner():
                if not gh.add_collaborator(repo_name, admin_username, admin=True):
                    click.echo(click.style(f'Error inviting {au["login"]}.', fg='red'))
        else:
            click.echo(click.style(f'Could not find {admin_username}.', fg='red'))
    click.echo('Done!')
import logging
from typing import Dict, List, Optional, TYPE_CHECKING, Union, Any, Iterator
import collections.abc
import requests
from threescale_api import utils
if TYPE_CHECKING:
from threescale_api.client import ThreeScaleClient, RestApiClient
log = logging.getLogger(__name__)
class DefaultClient(collections.abc.Mapping):
    def __init__(self, parent=None, instance_klass=None,
                 entity_name: str = None, entity_collection: str = None):
        """Creates instance of the default client
        Args:
            parent: Parent resource or client
            instance_klass: Which class should be used to instantiate the resource
            entity_name(str): Entity name - required for extraction
            entity_collection(str): Collection name - required for extraction
        """
        self.parent = parent
        self._instance_klass = instance_klass
        self._entity_name = entity_name
        if entity_collection is None and entity_name is not None:
            # Default the collection name to the naive plural of the entity name
            entity_collection = f'{entity_name}s'
        self._entity_collection = entity_collection

    @property
    def url(self) -> str:
        """Default url for the resources collection
        Returns(str): URL
        """
        return self.threescale_client.admin_api_url

    @property
    def threescale_client(self) -> 'ThreeScaleClient':
        """Gets instance of the 3scale default client
        Returns(ThreeScaleClient): 3scale client
        """
        return self.parent.threescale_client

    @property
    def rest(self) -> 'RestApiClient':
        """Rest API client for the 3scale instance
        Returns(RestApiClient):
        """
        return self.threescale_client.rest

    def list(self, **kwargs) -> List['DefaultResource']:
        """List all entities
        Args:
            **kwargs: Optional parameters
        Returns(List['DefaultResource]): List of resources
        """
        log.info(self._log_message("[LIST] List", args=kwargs))
        instance = self._list(**kwargs)
        return instance

    def create(self, params: dict = None, **kwargs) -> 'DefaultResource':
        """Create a new instance
        Args:
            params: Parameters required to create new instance
            **kwargs: Optional parameters
        Returns(DefaultResource): The newly created resource
        """
        log.info(self._log_message("[CREATE] Create new ", body=params, args=kwargs))
        url = self._entity_url()
        response = self.rest.post(url=url, json=params, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def delete(self, entity_id: int = None, **kwargs) -> bool:
        """Delete resource
        Args:
            entity_id(int): Entity id
            **kwargs: Optional args
        Returns(bool): True if the resource has been successfully deleted
        """
        log.info(self._log_message("[DELETE] Delete ", entity_id=entity_id, args=kwargs))
        url = self._entity_url(entity_id=entity_id)
        response = self.rest.delete(url=url, **kwargs)
        return response.ok

    def exists(self, entity_id=None, throws=False, **kwargs) -> bool:
        """Check whether the resource exists
        Args:
            entity_id(int): Entity id
            throws(bool): Whether to raise on a failed request instead
            **kwargs: Optional args
        Returns(bool): True if the resource exists
        """
        log.info(self._log_message("[EXIST] Resource exist ", entity_id=entity_id, args=kwargs))
        url = self._entity_url(entity_id=entity_id)
        response = self.rest.get(url=url, throws=throws, **kwargs)
        return response.ok

    def update(self, entity_id=None, params: dict = None, **kwargs) -> 'DefaultResource':
        """Update resource
        Args:
            entity_id(int): Entity id
            params(dict): Params to be updated
            **kwargs: Optional args
        Returns(DefaultResource): Resource instance
        """
        log.info(self._log_message("[UPDATE] Update ", body=params,
                                   entity_id=entity_id, args=kwargs))
        url = self._entity_url(entity_id=entity_id)
        response = self.rest.put(url=url, json=params, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def fetch(self, entity_id: int = None, **kwargs) -> dict:
        """Fetch the entity dictionary
        Args:
            entity_id(int): Entity id
            **kwargs: Optional args
        Returns(dict): Resource dict from the 3scale
        """
        log.debug(self._log_message("[FETCH] Fetch ", entity_id=entity_id, args=kwargs))
        url = self._entity_url(entity_id=entity_id)
        response = self.rest.get(url=url, **kwargs)
        return utils.extract_response(response=response, entity=self._entity_name)

    def __getitem__(self, selector: Union[int, 'str']) -> 'DefaultResource':
        """Gets the item
        Args:
            selector(Union[int, 'str']): Selector whether id or string
        Returns(DefaultResource): Resource instance
        """
        if isinstance(selector, int):
            return self.read(selector)
        return self.read_by_name(selector)

    def __len__(self) -> int:
        return len(self._list())

    def __iter__(self) -> Iterator['DefaultResource']:
        # Bug fix: this used to `return next(iter(self._list()))`, which
        # returned the FIRST RESOURCE instead of an iterator and broke
        # `for item in client` (and the Mapping protocol). __iter__ must
        # return an iterator over the resources.
        return iter(self._list())

    def read(self, entity_id: int = None) -> 'DefaultResource':
        """Read the instance, read will just create empty resource and lazyloads only if needed
        Args:
            entity_id(int): Entity id
        Returns(DefaultResource): Default resource
        """
        log.debug(self._log_message("[READ] Read ", entity_id=entity_id))
        return self._instance_klass(client=self, entity_id=entity_id)

    def read_by_name(self, name: str, **kwargs) -> 'DefaultResource':
        """Read resource by name
        Args:
            name: Name of the resource (either system name, name, org_name ...)
            **kwargs:
        Returns: The matching resource, or None when no name matches
        """
        for item in self._list(**kwargs):
            if item.entity_name and item.entity_name == name:
                return item

    def select(self, predicate, **kwargs) -> List['DefaultResource']:
        """Select resources based on the predicate
        Args:
            predicate: Predicate
            **kwargs: Optional args
        Returns: List of resources
        """
        return [item for item in self._list(**kwargs) if predicate(item)]

    def select_by(self, **params) -> List['DefaultResource']:
        """Select by params - logical and
        Usage example: select_by(role='admin')
        Args:
            **params: params used for selection
        Returns: List of resources
        """
        log.debug("[SELECT] By params: %s", params)

        def predicate(item):
            for (key, val) in params.items():
                if item[key] != val:
                    return False
            return True

        return self.select(predicate=predicate)

    def read_by(self, **params) -> 'DefaultResource':
        """Read by params - it will return just one instance of the resource
        Args:
            **params: params used for selection
        Returns(DefaultResource): Resource instance, or None when nothing matches
        """
        result = self.select_by(**params)
        return result[0] if result else None

    def _log_message(self, message, entity_id=None, body=None, args=None) -> str:
        msg = f"{message} {self._instance_klass.__name__}"
        if entity_id:
            # Bug fix: the f-string used to contain an extra ")" which produced
            # unbalanced parentheses like "(42))" in every log line.
            msg += f"({entity_id})"
        if body:
            msg += f" {body}"
        if args:
            msg += f" args={args}"
        return msg

    def _list(self, **kwargs) -> List['DefaultResource']:
        """Internal list implementation used in list or `select` methods
        Args:
            **kwargs: Optional parameters
        Returns(List['DefaultResource']):
        """
        url = self._entity_url()
        response = self.rest.get(url=url, **kwargs)
        instance = self._create_instance(response=response, collection=True)
        return instance

    def _entity_url(self, entity_id=None) -> str:
        if not entity_id:
            return self.url
        return self.url + '/' + str(entity_id)

    def _create_instance(self, response: requests.Response, klass=None, collection: bool = False):
        klass = klass or self._instance_klass
        extracted = self._extract_resource(response, collection)
        instance = self._instantiate(extracted=extracted, klass=klass)
        log.debug("[INSTANCE] Created instance: %s", instance)
        return instance

    def _extract_resource(self, response, collection) -> Union[List, Dict]:
        extract_params = dict(response=response, entity=self._entity_name)
        if collection:
            extract_params['collection'] = self._entity_collection
        extracted = utils.extract_response(**extract_params)
        return extracted

    def _instantiate(self, extracted, klass):
        if isinstance(extracted, list):
            instance = [self.__make_instance(item, klass) for item in extracted]
            return instance
        return self.__make_instance(extracted, klass)

    def __make_instance(self, extracted: dict, klass):
        # When no klass is configured the raw extracted dict is returned as-is
        instance = klass(client=self, entity=extracted) if klass else extracted
        return instance
class DefaultResource(collections.abc.MutableMapping):
    def __init__(self, client: DefaultClient = None, entity_id: int = None, entity_name: str = None,
                 entity: dict = None):
        """Create instance of the resource
        Args:
            client: Client instance of the resource
            entity_id(int): Entity id
            entity_name(str): Entity name field (system_name or name ...)
            entity(dict): Entity instance
        """
        # Bug fix: `entity_id or entity.get('id')` raised AttributeError when
        # neither entity_id nor entity was supplied; fall back to None instead.
        self._entity_id = entity_id or (entity or {}).get('id')
        self._entity = entity
        self._client = client
        self._entity_name = entity_name

    @property
    def threescale_client(self) -> 'ThreeScaleClient':
        return self.client.threescale_client

    @property
    def parent(self) -> 'DefaultResource':
        return self.client.parent

    @parent.setter
    def parent(self, parent):
        self.client.parent = parent

    @property
    def entity_name(self) -> Optional[str]:
        # Value of the configured name field (e.g. system_name, name, ...)
        return self[self._entity_name]

    @property
    def url(self) -> str:
        return self.client.url + f"/{self.entity_id}"

    @property
    def entity(self) -> dict:
        # Accessing the entity triggers a lazy fetch when it was never loaded
        self._lazy_load()
        return self._entity

    @property
    def client(self) -> DefaultClient:
        return self._client

    @property
    def entity_id(self) -> int:
        return self._entity_id or self._entity.get('id')

    @entity_id.setter
    def entity_id(self, value):
        self._entity_id = value

    def __getitem__(self, item: str):
        return self.entity.get(item)

    def __setitem__(self, key: str, value):
        self.set(key, value)

    def __delitem__(self, key: str):
        del self.entity[key]

    def __len__(self) -> int:
        return len(self.entity)

    def __iter__(self) -> Iterator:
        return iter(self.entity)

    def __str__(self) -> str:
        return self.__class__.__name__ + f"({self.entity_id}): " + str(self.entity)

    def __repr__(self) -> str:
        return str(self)

    def __eq__(self, other) -> bool:
        return (
            self.__class__ == other.__class__ and
            self.entity_name == other.entity_name and
            self.entity_id == other.entity_id
        )

    def get(self, item):
        return self.entity.get(item)

    def set(self, item: str, value: Any):
        self.entity[item] = value

    def _lazy_load(self, **kwargs) -> 'DefaultResource':
        if self._entity is None:
            # Lazy load the entity
            fetched = self.fetch(**kwargs)
            if isinstance(fetched, dict):
                self._entity = fetched
            elif fetched is not None:
                self._entity = fetched._entity
            else:
                return None
        return self

    def read(self, **kwargs) -> 'DefaultResource':
        """Drop the cached entity and re-fetch it from the server."""
        self._invalidate()
        self._lazy_load(**kwargs)
        return self

    def fetch(self, **kwargs) -> dict:
        return self.client.fetch(self.entity_id, **kwargs)

    def exists(self, **kwargs) -> bool:
        return self.client.exists(entity_id=self.entity_id, **kwargs)

    def delete(self, **kwargs):
        """Delete the resource.
        Bug fix: the client's success flag used to be discarded; propagate it
        so callers can tell whether the deletion succeeded.
        """
        return self.client.delete(entity_id=self.entity_id, resource=self, **kwargs)

    def update(self, params: dict = None, **kwargs) -> 'DefaultResource':
        """Push the (optionally amended) entity back to the server."""
        new_params = {**self.entity}
        if params:
            new_params.update(params)
        new_entity = self.client.update(entity_id=self.entity_id,
                                        params=new_params,
                                        resource=self,
                                        **kwargs)
        self._entity = new_entity.entity
        return self

    def _invalidate(self):
        # Forces a re-fetch on the next entity access
        self._entity = None
class DefaultPaginationClient(DefaultClient):
    """ Client to handle API endpoints with pagination.
    List of endpoints supporting pagination with per_page size:
        - accounts 500
        limits per app plan 50 - not implemented in client
        application list for all services 500 - not implemented in client
        - backend mapping rules 500
        - backend method list 500
        - backend metric 500
        - backend 500
        - service 500
        invoice list by account 20 - not implemented by standard "list" method
        - invoice list 20
        - all cms 100
    """
    def __init__(self, *args, per_page=500, **kwargs):
        # per_page=None disables automatic pagination entirely
        self.per_page = per_page
        super().__init__(*args, **kwargs)

    def _list(self, **kwargs):
        """ List all objects via paginated API endpoint """
        kwargs = kwargs.copy()
        kwargs.setdefault("params", {})
        # Respect caller-managed paging (explicit "page" param) or disabled pagination
        if "page" in kwargs["params"] or self.per_page is None:
            return super()._list(**kwargs)
        pagenum = 1
        kwargs["params"]["page"] = pagenum
        kwargs["params"]["per_page"] = self.per_page
        page = super()._list(**kwargs)
        ret_list = page
        # Keep fetching successive pages until an empty page is returned
        while len(page):
            pagenum += 1
            kwargs["params"]["page"] = pagenum
            page = super()._list(**kwargs)
            ret_list += page
        return ret_list

    def __iter__(self):
        # Bug fix: this used to `return self._list()` - a list, not an
        # iterator - which violates the iterator protocol and made
        # `iter(client)` raise TypeError.
        return iter(self._list())
class DefaultPlanClient(DefaultClient):
    def set_default(self, entity_id: int, **kwargs) -> 'DefaultPlanResource':
        """Make the plan identified by `entity_id` the default one.
        Args:
            entity_id: Entity id
            **kwargs: Optional args
        Returns(DefaultPlanResource): The updated plan resource
        """
        log.info(self._log_message("[PLAN] Set default ", entity_id=entity_id, args=kwargs))
        endpoint = self._entity_url(entity_id) + '/default'
        return self._create_instance(response=self.rest.put(url=endpoint, **kwargs))

    def get_default(self, **kwargs) -> Optional['DefaultResource']:
        """Return the plan currently marked as default, or None if there is none.
        Args:
            **kwargs: Optional arguments
        Returns(DefaultResource): Default plan resource or None
        """
        for plan in self._list(**kwargs):
            if plan.is_default:
                return plan
        return None
class DefaultPlanResource(DefaultResource):
    def __init__(self, entity_name='system_name', **kwargs):
        # Plans are identified by their system_name field by default
        super().__init__(entity_name=entity_name, **kwargs)

    def set_default(self, **kwargs) -> 'DefaultStateResource':
        """Mark this plan as the default one on the server.
        Args:
            **kwargs: Optional args
        Returns(DefaultStateResource): The updated resource
        """
        return self.client.set_default(entity_id=self.entity_id, **kwargs)

    @property
    def is_default(self) -> bool:
        """True when this plan is currently flagged as the default plan."""
        return self.get('default') is True
class DefaultStateClient(DefaultPaginationClient):
    def set_state(self, entity_id, state: str, **kwargs):
        """Transition the entity identified by `entity_id` into `state`.
        Args:
            entity_id(int): Entity id
            state(str): Target state (appended to the entity URL)
            **kwargs: Optional args
        Returns(DefaultStateResource): State resource instance
        """
        log.info(self._log_message("[STATE] Set state ", body=f"[{state}]", args=kwargs))
        endpoint = '/'.join((self._entity_url(entity_id), state))
        return self._create_instance(response=self.rest.put(url=endpoint, **kwargs))
class DefaultStateResource(DefaultResource):
    def set_state(self, state: str, **kwargs) -> 'DefaultStateResource':
        """Transition this resource into the given `state` via its client.
        Args:
            state(str): Target state
            **kwargs: Optional args
        Returns(DefaultStateResource): State resource instance
        """
        return self.client.set_state(entity_id=self.entity_id, state=state, **kwargs)
class DefaultUserResource(DefaultStateResource):
    """User resource; all lifecycle/role changes delegate to `set_state`."""

    def __init__(self, entity_name='username', **kwargs):
        # Users are identified by their username field by default
        super().__init__(entity_name=entity_name, **kwargs)

    def suspend(self, **kwargs) -> 'DefaultUserResource':
        """Suspend the user account.
        Returns(DefaultUserResource): User instance
        """
        return self.set_state(state='suspend', **kwargs)

    def resume(self, **kwargs):
        """Resume a previously suspended user.
        Returns(DefaultUserResource): User instance
        """
        return self.set_state(state='resume', **kwargs)

    def activate(self, **kwargs):
        """Activate the user account.
        Returns(DefaultUserResource): User instance
        """
        return self.set_state(state='activate', **kwargs)

    def set_as_admin(self, **kwargs):
        """Promote the user to the admin role.
        Returns(DefaultUserResource): User instance
        """
        return self.set_state(state='set_as_admin', **kwargs)

    def set_as_member(self, **kwargs):
        """Demote the user to the member role.
        Returns(DefaultUserResource): User instance
        """
        return self.set_state(state='set_as_member', **kwargs)
import logging
import time
from urllib.parse import urljoin
import backoff
import requests
from threescale_api import errors, resources
log = logging.getLogger(__name__)
class ThreeScaleClient:
    """Facade over the 3scale admin API: owns the REST client and one
    sub-client per API resource type (services, accounts, plans, ...)."""
    def __init__(self, url: str, token: str,
                 throws: bool = True, ssl_verify: bool = True, wait: int = -1):
        """Creates instance of the 3scale client
        Args:
            url: 3scale instance url
            token: Access token
            throws: Whether it should throw an error
            ssl_verify: Whether to verify ssl
            wait: Whether to wait for 3scale availability, negative number == no waiting
                  positive number == wait another extra seconds
        """
        self._rest = RestApiClient(url=url, token=token, throws=throws, ssl_verify=ssl_verify)
        # One sub-client per resource type; each shares this client as parent.
        self._services = resources.Services(self, instance_klass=resources.Service)
        self._accounts = resources.Accounts(self, instance_klass=resources.Account)
        self._provider_accounts = \
            resources.ProviderAccounts(self, instance_klass=resources.ProviderAccount)
        self._provider_account_users = \
            resources.ProviderAccountUsers(self, instance_klass=resources.ProviderAccountUser)
        self._methods = resources.Methods(self, instance_klass=resources.Method)
        self._metrics = resources.Metrics(self, instance_klass=resources.Metric)
        self._analytics = resources.Analytics(self)
        self._tenants = resources.Tenants(self, instance_klass=resources.Tenant)
        self._providers = resources.Providers(self, instance_klass=resources.Provider)
        self._access_tokens = \
            resources.AccessTokens(self, instance_klass=resources.AccessToken)
        self._active_docs = resources.ActiveDocs(self, instance_klass=resources.ActiveDoc)
        self._account_plans = resources.AccountPlans(self, instance_klass=resources.AccountPlan)
        self._settings = resources.SettingsClient(self)
        self._admin_portal_auth_providers = resources.AdminPortalAuthProviders(
            self, instance_klass=resources.AdminPortalAuthProvider)
        self._dev_portal_auth_providers = resources.DevPortalAuthProviders(
            self, instance_klass=resources.DevPortalAuthProvider)
        self._policy_registry = resources.PoliciesRegistry(self,
                                                           instance_klass=resources.PolicyRegistry)
        self._backends = resources.Backends(self, instance_klass=resources.Backend)
        self._webhooks = resources.Webhooks(self)
        self._invoices = resources.Invoices(self, instance_klass=resources.Invoice)
        self._fields_definitions =\
            resources.FieldsDefinitions(self, instance_klass=resources.FieldsDefinition)
        self._cms_files = resources.CmsFiles(self, instance_klass=resources.CmsFile)
        self._cms_sections = resources.CmsSections(self, instance_klass=resources.CmsSection)
        self._cms_pages = resources.CmsPages(self, instance_klass=resources.CmsPage)
        self._cms_builtin_pages = resources.CmsBuiltinPages(self, instance_klass=resources.CmsPage)
        self._cms_layouts = resources.CmsLayouts(self, instance_klass=resources.CmsLayout)
        self._cms_builtin_partials =\
            resources.CmsBuiltinPartials(self, instance_klass=resources.CmsPartial)
        self._cms_partials = resources.CmsPartials(self, instance_klass=resources.CmsPartial)
        if wait >= 0:
            self.wait_for_tenant()
            # TODO: all the implemented checks aren't enough yet
            # 3scale can still return 404/409 error, therefore slight artificial sleep
            # here to mitigate the problem. This requires proper fix in checks
            time.sleep(wait)

    # Retries every 6s for up to 90 attempts until the tenant looks ready.
    @backoff.on_predicate(
        backoff.constant, lambda ready: not ready, interval=6, max_tries=90, jitter=None)
    def wait_for_tenant(self) -> bool:
        """
        When True is returned, there is some chance the tenant is actually ready.
        """
        # TODO: checks below were collected from various sources to craft
        # ultimate readiness check. There might be duplicates though, so
        # worth to review it one day
        try:
            return self.account_plans.exists(throws=True) \
                and len(self.account_plans.fetch()["plans"]) >= 1 \
                and len(self.account_plans.list()) >= 1 \
                and self.accounts.exists(throws=True) \
                and len(self.accounts.list()) >= 1 \
                and self.services.exists(throws=True) \
                and len(self.services.list()) >= 1
        except errors.ApiClientError as err:
            # Transient "not ready yet" errors; keep retrying via backoff
            if err.code in (404, 409, 503):
                log.info("wait_for_tenant failed: %s", err)
                return False
            raise err
        except Exception as err:
            log.info("wait_for_tenant failed: %s", err)
            return False

    @property
    def rest(self) -> 'RestApiClient':
        """Get REST api client instance
        Returns(RestApiClient): Rest api client instance
        """
        return self._rest

    @property
    def parent(self) -> 'ThreeScaleClient':
        """Parent is self - the 3scale client
        Returns(ThreeScaleClient):
        """
        return self

    @property
    def threescale_client(self) -> 'ThreeScaleClient':
        """3scale client instance
        Returns(ThreeScaleClient): 3scale client instance
        """
        return self

    @property
    def url(self) -> str:
        """Get tenant url
        Returns(str): URL
        """
        return self._rest.url

    @property
    def url_with_token(self) -> str:
        """Tenant URL with the access token embedded (e.g. https://TOKEN@host).
        NOTE: exposes the secret token; avoid logging this value."""
        return self.rest.url.replace('//', f"//{self.rest._token}@")

    @property
    def token(self) -> str:
        """The access token used for API calls."""
        return self.rest._token

    @property
    def admin_api_url(self) -> str:
        """Get admin API url
        Returns(str): URL of the 3scale admin api
        """
        return self.url + "/admin/api"

    @property
    def master_api_url(self) -> str:
        """Get master API url
        Returns(str): URL of the 3scale master api
        """
        return self.url + "/master/api"

    @property
    def services(self) -> resources.Services:
        """Gets services client
        Returns(resources.Services): Services client
        """
        return self._services

    @property
    def accounts(self) -> resources.Accounts:
        """Gets accounts client
        Returns(resources.Accounts): Accounts client
        """
        return self._accounts

    @property
    def provider_accounts(self) -> resources.ProviderAccounts:
        """Gets provider accounts client
        Returns(resources.ProviderAccouts): Provider Accounts client"""
        return self._provider_accounts

    @property
    def provider_account_users(self) -> resources.ProviderAccountUsers:
        """Gets provider account users client
        Returns(resources.ProviderAccountUsers): Provider Accounts User client
        """
        return self._provider_account_users

    @property
    def account_plans(self) -> resources.AccountPlans:
        """Gets accounts client
        Returns(resources.AccountPlans): Account plans client
        """
        return self._account_plans

    @property
    def methods(self) -> resources.Methods:
        """Gets methods client
        Returns(resources.Methods): Methods client
        """
        return self._methods

    @property
    def metrics(self) -> resources.Metrics:
        """Gets metrics client
        Returns(resources.Metrics): Metrics client
        """
        return self._metrics

    @property
    def analytics(self):
        """Gets analytics data client
        Returns(resources.Analytics): Analytics client
        """
        return self._analytics

    @property
    def providers(self) -> resources.Providers:
        """Gets providers client
        Returns(resources.Providers): Providers client
        """
        return self._providers

    @property
    def access_tokens(self) -> resources.AccessTokens:
        """Gets AccessTokens client
        Returns(resources.AccessToken): AccessTokens client
        """
        return self._access_tokens

    @property
    def tenants(self) -> resources.Tenants:
        """Gets tenants client
        Returns(resources.Tenants): Tenants client
        """
        return self._tenants

    @property
    def active_docs(self) -> resources.ActiveDocs:
        """Gets active docs client
        Returns(resources.ActiveDocs): Active docs client
        """
        return self._active_docs

    @property
    def settings(self) -> resources.SettingsClient:
        """Gets settings client
        Returns(resources.SettingsClient): Active docs client
        """
        return self._settings

    @property
    def backends(self) -> resources.Backends:
        """Gets backends client
        Returns(resources.Backends): Backends client
        """
        return self._backends

    @property
    def dev_portal_auth_providers(self) -> resources.DevPortalAuthProviders:
        """Developer-portal authentication providers client."""
        return self._dev_portal_auth_providers

    @property
    def admin_portal_auth_providers(self) -> resources.AdminPortalAuthProviders:
        """Admin-portal authentication providers client."""
        return self._admin_portal_auth_providers

    @property
    def policy_registry(self) -> resources.PolicyRegistry:
        """Policy registry client."""
        return self._policy_registry

    @property
    def webhooks(self) -> resources.Webhooks:
        """Webhooks client."""
        return self._webhooks

    @property
    def invoices(self) -> resources.Invoices:
        """Invoices client."""
        return self._invoices

    @property
    def fields_definitions(self) -> resources.FieldsDefinitions:
        """Fields definitions client."""
        return self._fields_definitions

    @property
    def cms_files(self) -> resources.CmsFiles:
        """CMS files client."""
        return self._cms_files

    @property
    def cms_sections(self) -> resources.CmsSections:
        """CMS sections client."""
        return self._cms_sections

    @property
    def cms_pages(self) -> resources.CmsPages:
        """CMS pages client."""
        return self._cms_pages

    @property
    def cms_builtin_pages(self) -> resources.CmsBuiltinPages:
        """CMS built-in pages client."""
        return self._cms_builtin_pages

    @property
    def cms_layouts(self) -> resources.CmsLayouts:
        """CMS layouts client."""
        return self._cms_layouts

    @property
    def cms_partials(self) -> resources.CmsPartials:
        """CMS partials client."""
        return self._cms_partials

    @property
    def cms_builtin_partials(self) -> resources.CmsBuiltinPartials:
        """CMS built-in partials client."""
        return self._cms_builtin_partials
class RestApiClient:
    def __init__(self, url: str, token: str, throws: bool = True, ssl_verify: bool = True):
        """Creates instance of the Rest API client
        Args:
            url(str): Tenant url
            token(str): Tenant provider token
            throws(bool): Whether to throw exception
            ssl_verify(bool): Whether to verify the ssl certificate
        """
        self._url = url
        self._token = token
        self._throws = throws
        self._ssl_verify = ssl_verify
        log.debug("[REST] New instance: %s token=%s throws=%s ssl=%s", url, token, throws,
                  ssl_verify)

    @property
    def url(self) -> str:
        return self._url

    def request(self, method='GET', url=None, path='', params: dict = None,
                headers: dict = None, throws=None, **kwargs):
        """Create new request
        Args:
            method(str): method to be used to create an request
            url(str): url to be used to create new request
            path(str): path to be accessed - if url is not provided
            params(dict): Query parameters
            headers(dict): Headers parameters
            throws(bool): Whether to throw
            **kwargs: Optional args added to request
        Returns(requests.Response): The (processed) HTTP response
        """
        # 'resource' is a client-layer hint, not a requests argument
        if 'resource' in kwargs:
            del kwargs['resource']
        full_url = url if url else urljoin(self.url, path)
        full_url = full_url + ".json"
        headers = headers or {}
        # Bug fix: copy the params dict instead of mutating the caller's one -
        # the update below used to leak the access token back into the dict
        # the caller passed in.
        params = dict(params) if params else {}
        if throws is None:
            throws = self._throws
        params.update(access_token=self._token)
        log.debug("[%s] (%s) params={%s} headers={%s} %s", method, full_url, params, headers,
                  kwargs if kwargs else '')
        response = requests.request(method=method, url=full_url, headers=headers,
                                    params=params, verify=self._ssl_verify, **kwargs)
        process_response = self._process_response(response, throws=throws)
        return process_response

    def get(self, *args, **kwargs):
        return self.request('GET', *args, **kwargs)

    def post(self, *args, **kwargs):
        return self.request('POST', *args, **kwargs)

    def put(self, *args, **kwargs):
        return self.request('PUT', *args, **kwargs)

    def delete(self, *args, **kwargs):
        return self.request('DELETE', *args, **kwargs)

    def patch(self, *args, **kwargs):
        return self.request('PATCH', *args, **kwargs)

    @classmethod
    def _process_response(cls, response: requests.Response, throws=None) -> requests.Response:
        """Log the response and optionally raise ApiClientError on failure."""
        message = f"[RES] Response({response.status_code}): {response.content}"
        if response.ok:
            log.debug(message)
        else:
            log.error(message)
            if throws:
                raise errors.ApiClientError(response.status_code, response.reason, response.content)
        return response
"""Implementation of custom 3scale specific authentication method(s) for requests (api clients"""
import requests
import requests.auth
class BaseClientAuth(requests.auth.AuthBase):
    """Abstract class for authentication of api client"""
    def __init__(self, app, location=None):
        # app: application whose credentials are attached to outgoing calls.
        # location: where credentials go - "authorization" (HTTP Basic),
        # "headers", or "query"; defaults to the service proxy's
        # configured credentials_location.
        self.app = app
        self.location = location
        # Subclasses populate this dict with the concrete credential pairs.
        self.credentials = {}
        if location is None:
            self.location = app.service.proxy.list().entity["credentials_location"]
    def __call__(self, request):
        # Called by requests with the PreparedRequest; must return it
        # with the credentials attached.
        credentials = self.credentials
        if self.location == "authorization":
            # HTTP Basic auth built from the credential values (not keys)
            credentials = credentials.values()
            auth = requests.auth.HTTPBasicAuth(*credentials)
            return auth(request)
        if self.location == "headers":
            request.prepare_headers(credentials)
        elif self.location == "query":
            request.prepare_url(request.url, credentials)
        else:
            raise ValueError(f"Unknown credentials location '{self.location}'")
        return request
class UserKeyAuth(BaseClientAuth):
    """Provides user_key authentication for api client calls"""
    def __init__(self, app, location=None):
        super().__init__(app, location)
        # Map the service's configured user-key parameter name to the app's key
        key_param = self.app.service.proxy.list()["auth_user_key"]
        self.credentials = {key_param: self.app["user_key"]}

    def __call__(self, request):
        if self.location != "authorization":
            return super().__call__(request)
        # Basic auth variant: the user key is the username, password is empty
        user_key = next(iter(self.credentials.values()))
        return requests.auth.HTTPBasicAuth(user_key, "")(request)
class AppIdKeyAuth(BaseClientAuth):
    """Provides app_id/app_key pair based authentication for api client calls"""
    def __init__(self, app, location=None):
        super().__init__(app, location)
        proxy = self.app.service.proxy.list()
        app_id = self.app["application_id"]
        # First key registered for the application
        app_key = self.app.keys.list()["keys"][0]["key"]["value"]
        self.credentials = {
            proxy["auth_app_id"]: app_id,
            proxy["auth_app_key"]: app_key,
        }

    def __call__(self, request):
        # Placement of the credential pair is fully handled by the base class
        return super().__call__(request)
import logging
import shlex
from typing import Union, Iterable
from urllib.parse import urljoin
import requests
from requests.adapters import HTTPAdapter
from urllib3.util import Retry
logger = logging.getLogger(__name__)
def extract_response(response: requests.Response, entity: str = None,
                     collection: str = None) -> Union[dict, list]:
    """Pull the entity (or list of entities) out of a JSON API response.
    Args:
        response(requests.Response): Response
        entity(str): entity name to be extracted
        collection(str): collection name to be extracted
    Returns(Union[dict, list]): Extracted entity or list of entities
    """
    body: dict = response.json()
    # Unwrap the named collection envelope, when present
    if collection and collection in body:
        body = body.get(collection)
    if isinstance(body, list):
        # Collection payload: unwrap the entity envelope of every member
        return [member.get(entity) for member in body]
    if entity in body:
        return body.get(entity)
    return body
class HttpClient:
    """3scale specific!!! HTTP Client

    This provides client to easily run api calls against provided service.
    Due to some delays in the infrastructure the client is configured to retry
    calls under certain conditions. To modify this behavior customized session
    has to be passed. session has to be fully configured in such case
    (e.g. including authentication)

    :param app: Application for which client should do the calls
    :param endpoint: either 'sandbox_endpoint' (staging) or 'endpoint' (production),
        defaults to sandbox_endpoint
    :param verify: SSL verification
    :param cert: path to certificate
    :param disable_retry_status_list:
        Iterable collection of status code that should not be retried by requests
    """

    def __init__(self, app, endpoint: str = "sandbox_endpoint",
                 verify: bool = None, cert=None, disable_retry_status_list: Iterable = ()):
        self._app = app
        self._endpoint = endpoint
        # Fall back to the app-wide SSL-verification flag when not given explicitly
        self.verify = verify if verify is not None else app.api_client_verify
        self.cert = cert
        # 503 and 404 are retried by default (transient while gateway config
        # propagates); callers may opt specific codes out via the constructor
        self._status_forcelist = {503, 404} - set(disable_retry_status_list)
        self.auth = app.authobj()
        self.session = self._create_session()

        logger.debug("[HTTP CLIENT] New instance: %s", self._base_url)

    def close(self):
        """Close requests session"""
        self.session.close()

    @staticmethod
    def retry_for_session(session: requests.Session, status_forcelist: Iterable, total: int = 8):
        """Mount a retrying HTTPAdapter for both schemes on the given session."""
        retry = Retry(
            total=total,
            backoff_factor=1,
            status_forcelist=status_forcelist,
            raise_on_status=False,
            respect_retry_after_header=False
        )
        adapter = HTTPAdapter(max_retries=retry)
        session.mount("https://", adapter)
        session.mount("http://", adapter)

    @property
    def _base_url(self) -> str:
        """Determine right url at runtime"""
        # Re-fetched per request so endpoint changes are picked up immediately
        return self._app.service.proxy.fetch()[self._endpoint]

    def _create_session(self):
        """Creates session"""
        session = requests.Session()
        self.retry_for_session(session, self._status_forcelist)
        return session

    def extend_connection_pool(self, maxsize: int):
        """Extend connection pool"""
        # Mutates the mounted adapter's pool kwargs, then drops existing pools
        # so they are recreated with the new size
        self.session.adapters["https://"].poolmanager.connection_pool_kw["maxsize"] = maxsize
        self.session.adapters["https://"].poolmanager.clear()

    def request(self, method, path,
                params=None, data=None, headers=None, cookies=None, files=None,
                auth=None, timeout=None, allow_redirects=True, proxies=None,
                hooks=None, stream=None, json=None) -> requests.Response:
        """mimics requests interface"""
        url = urljoin(self._base_url, path)
        session = self.session
        # Explicit auth overrides the default; also set on the session so
        # prepare_request applies it when the Request carries none
        session.auth = auth or self.auth

        req = requests.Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )

        prep = session.prepare_request(req)

        logger.info("[CLIENT]: %s", request2curl(prep))

        send_kwargs = {
            "timeout": timeout,
            "allow_redirects": allow_redirects
        }

        proxies = proxies or {}

        # Merge environment settings (proxies, verify, cert) the same way
        # requests.Session.request does internally
        send_kwargs.update(
            session.merge_environment_settings(prep.url, proxies, stream, self.verify, self.cert))

        response = session.send(prep, **send_kwargs)

        logger.info("\n".join(["[CLIENT]:", response2str(response)]))

        return response

    def get(self, *args, **kwargs) -> requests.Response:
        """mimics requests interface"""
        return self.request('GET', *args, **kwargs)

    def post(self, *args, **kwargs) -> requests.Response:
        """mimics requests interface"""
        return self.request('POST', *args, **kwargs)

    def patch(self, *args, **kwargs) -> requests.Response:
        """mimics requests interface"""
        return self.request('PATCH', *args, **kwargs)

    def put(self, *args, **kwargs) -> requests.Response:
        """mimics requests interface"""
        return self.request('PUT', *args, **kwargs)

    def delete(self, *args, **kwargs) -> requests.Response:
        """mimics requests interface"""
        return self.request('DELETE', *args, **kwargs)
def request2curl(request: requests.PreparedRequest) -> str:
    """Create curl command corresponding to given request"""

    parts = ["curl", f"-X {shlex.quote(request.method)}"]
    if request.headers:
        for key, value in request.headers.items():
            parts.append(f"-H {shlex.quote(f'{key}: {value}')}")
    if request.body:
        body = request.body
        if isinstance(body, bytes):
            body = body.decode("utf-8")
        # Truncate long payloads so logs stay readable
        if len(body) > 160:
            body = body[:160] + "..."
        parts.append(f"-d {shlex.quote(body)}")
    parts.append(shlex.quote(request.url))

    return " ".join(parts)
def response2str(response: requests.Response):
    """Return string representation of requests.Response"""

    # The protocol version is faked; requests does not expose it directly.
    lines = [f"HTTP/1.1 {response.status_code} {response.reason}"]
    for key, value in response.headers.items():
        lines.append(f"{key}: {value}")
    lines.append("")

    body = response.text
    # Truncate long bodies so logs stay readable
    if len(body) > 160:
        body = body[:160] + "..."
    lines.append(body)

    return "\n".join(lines)
import logging
from enum import Enum
from typing import Dict, Union, List, Iterable
from threescale_api import auth
from threescale_api import utils
from threescale_api import errors
from threescale_api.defaults import DefaultClient, DefaultPlanClient, DefaultPlanResource, \
DefaultResource, DefaultStateClient, DefaultUserResource, DefaultStateResource, \
DefaultPaginationClient
from threescale_api import client
log = logging.getLogger(__name__)
class Services(DefaultPaginationClient):
    """Client for the services (products) management API."""

    def __init__(self, *args, entity_name='service', entity_collection='services',
                 per_page=500, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/services'
class MappingRules(DefaultPaginationClient):
    """Client for mapping rules of a parent proxy/backend."""

    def __init__(self, *args, entity_name='mapping_rule', entity_collection='mapping_rules',
                 per_page=None, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page,
                         **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/mapping_rules'
class Metrics(DefaultPaginationClient):
    """Client for metrics of a parent service/backend."""

    def __init__(self, *args, entity_name='metric', entity_collection='metrics', per_page=None,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/metrics'
class Limits(DefaultClient):
    """Client for usage limits of an application plan, scoped to one metric."""

    def __init__(self, *args, entity_name='limit', entity_collection='limits', metric=None,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)
        # Metric the limits belong to; may be (re)bound later via __call__
        self._metric = metric

    @property
    def metric(self) -> Union['Metric', 'BackendMetric']:
        return self._metric

    @property
    def application_plan(self) -> 'ApplicationPlan':
        return self.parent

    def __call__(self, metric: 'Metric' = None) -> 'Limits':
        # Enables the fluent form: app_plan.limits(metric).list()
        self._metric = metric
        return self

    @property
    def url(self) -> str:
        return self.application_plan.plans_url + f'/metrics/{self.metric.entity_id}/limits'

    def list_per_app_plan(self, **kwargs):
        """List all limits of the parent application plan across all metrics."""
        log.info("[LIST] List limits per app plan: %s", kwargs)
        url = self.parent.url + '/limits'
        response = self.rest.get(url=url, **kwargs)
        instance = self._create_instance(response=response)
        return instance
class PricingRules(DefaultClient):
    """Client for pricing rules of an application plan, scoped to one metric."""

    def __init__(self, *args, entity_name='pricing_rule', entity_collection='pricing_rules',
                 metric: 'Metric' = None, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)
        # Metric the rules belong to; may be (re)bound later via __call__
        self._metric = metric

    @property
    def metric(self) -> 'Metric':
        return self._metric

    @property
    def application_plan(self) -> 'ApplicationPlan':
        return self.parent

    def __call__(self, metric: 'Metric' = None) -> 'PricingRules':
        # Enables the fluent form: app_plan.pricing_rules(metric).list()
        self._metric = metric
        return self

    @property
    def url(self) -> str:
        return self.application_plan.plans_url + f'/metrics/{self.metric.entity_id}/pricing_rules'
class Methods(DefaultPaginationClient):
    """Client for methods of a parent metric."""

    def __init__(self, *args, entity_name='method', entity_collection='methods', per_page=None,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/methods'
class ApplicationPlans(DefaultPlanClient):
    """Client for application plans of a parent service."""

    def __init__(self, *args, entity_name='application_plan', entity_collection='plans', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        # Service-scoped plans endpoint
        return self.parent.url + '/application_plans'

    @property
    def plans_url(self) -> str:
        # Account-wide plans endpoint (used by Limits/PricingRules)
        return self.threescale_client.admin_api_url + '/application_plans'
class ApplicationPlanFeatures(DefaultClient):
    """Client for features of an application plan."""

    def __init__(self, *args, entity_name='feature', entity_collection='features', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/features'
class AccountUsers(DefaultStateClient):
    """Client for users of a developer account."""

    def __init__(self, *args, entity_name='user', entity_collection='users', per_page=None,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/users'
class AccountPlans(DefaultPlanClient):
    """Client for account plans."""

    def __init__(self, *args, entity_name='account_plan', entity_collection='plans', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/account_plans'
class Accounts(DefaultStateClient):
    """Client for developer accounts."""

    def __init__(self, *args, entity_name='account', entity_collection='accounts', per_page=500,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page,
                         **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/accounts'

    def create(self, params: dict = None, **kwargs) -> 'Account':
        """Create new account

        Delegates to signup(), because the 3scale API creates accounts
        through the /signup endpoint.

        Args:
            params(dict): Parameters to used to create new instance
            **kwargs: Optional args

        Returns(Account): Account instance
        """
        return self.signup(params=params, **kwargs)

    def signup(self, params: dict, **kwargs) -> 'Account':
        """Sign Up for an account

        Args:
            params(dict): Parameters to used to create new instance
            **kwargs: Optional args

        Returns(Account): Account instance
        """
        log.info("[SIGNUP] Create new Signup: params=%s, kwargs=%s", params, kwargs)
        url = self.threescale_client.admin_api_url + '/signup'
        response = self.rest.post(url=url, json=params, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def set_plan(self, entity_id: int, plan_id: int, **kwargs):
        """Sets account plan for the account

        Args:
            entity_id: Entity id
            plan_id: Plan id
            **kwargs: Optional args

        Returns: Updated account instance
        """
        log.info("[PLAN] Set plan for an account(%s): %s", entity_id, plan_id)
        params = dict(plan_id=plan_id)
        url = self._entity_url(entity_id=entity_id) + '/change_plan'
        response = self.rest.put(url=url, json=params, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def send_message(self, entity_id: int, body: str, subject: str = None, **kwargs) -> Dict:
        """Send message to a developer account

        Args:
            entity_id(int): Entity id
            body(str): Message body
            subject(str): Optional message subject
            **kwargs: Optional args

        Returns(Dict): Response
        """
        log.info("[MSG] Send message to account (%s): %s %s", entity_id, body, kwargs)
        params = dict(body=body)
        if subject:
            params["subject"] = subject

        url = self._entity_url(entity_id=entity_id) + '/messages'
        response = self.rest.post(url=url, json=params, **kwargs)
        instance = utils.extract_response(response=response)
        return instance

    def approve(self, entity_id: int, **kwargs) -> 'Account':
        """Approve the account

        Args:
            entity_id(int): Entity id
            **kwargs: Optional args

        Returns(Account): Account resource
        """
        return self.set_state(entity_id=entity_id, state='approve', **kwargs)

    def reject(self, entity_id, **kwargs) -> 'Account':
        """Reject the account

        Args:
            entity_id(int): Entity id
            **kwargs: Optional args

        Returns(Account): Account resource
        """
        return self.set_state(entity_id=entity_id, state='reject', **kwargs)

    def pending(self, entity_id, **kwargs) -> 'Account':
        """Set the account as pending

        Args:
            entity_id(int): Entity id
            **kwargs: Optional args

        Returns(Account): Account resource
        """
        return self.set_state(entity_id=entity_id, state='make_pending', **kwargs)
class Applications(DefaultStateClient):
    """Client for applications of a developer account."""

    def __init__(self, *args, entity_name='application', entity_collection='applications',
                 per_page=None, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/applications'

    def change_plan(self, entity_id: int, plan_id: int, **kwargs):
        """Change the application plan of an application.

        Args:
            entity_id(int): Application id
            plan_id(int): New plan id
        """
        log.info("[PLAN] Change plan for application (%s) to %s %s", entity_id, plan_id, kwargs)
        params = dict(plan_id=plan_id)
        url = self._entity_url(entity_id=entity_id) + '/change_plan'
        response = self.rest.put(url=url, json=params, **kwargs)
        instance = utils.extract_response(response=response)
        return instance

    def customize_plan(self, entity_id: int, **kwargs):
        """Create a customized plan for the application."""
        log.info("[PLAN] Customize plan for application (%s) %s", entity_id, kwargs)
        url = self._entity_url(entity_id=entity_id) + '/customize_plan'
        response = self.rest.put(url=url, **kwargs)
        instance = utils.extract_response(response=response)
        return instance

    def decustomize_plan(self, entity_id: int, **kwargs):
        """Remove the customized plan of the application."""
        log.info("[PLAN] Decustomize plan for application (%s) %s", entity_id, kwargs)
        url = self._entity_url(entity_id=entity_id) + '/decustomize_plan'
        response = self.rest.put(url=url, **kwargs)
        instance = utils.extract_response(response=response)
        return instance

    # FIX: the three state transitions below previously discarded the result
    # of set_state; return it for consistency with Accounts.approve/reject
    # (backward compatible — callers ignoring the return value are unaffected).
    def accept(self, entity_id: int, **kwargs):
        """Accept the application; returns the updated resource."""
        return self.set_state(entity_id=entity_id, state='accept', **kwargs)

    def suspend(self, entity_id: int, **kwargs):
        """Suspend the application; returns the updated resource."""
        return self.set_state(entity_id=entity_id, state='suspend', **kwargs)

    def resume(self, entity_id: int, **kwargs):
        """Resume a suspended application; returns the updated resource."""
        return self.set_state(entity_id=entity_id, state='resume', **kwargs)
class DevPortalAuthProviders(DefaultClient):
    """Client for developer-portal authentication providers."""

    def __init__(self, *args, entity_name='authentication_provider',
                 entity_collection='authentication_providers', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/authentication_providers'
class ApplicationReferrerFilters(DefaultClient):
    """Client for referrer filters of an application."""

    # NOTE(review): entity_name/entity_collection say 'application(s)', not
    # 'referrer_filter(s)' — looks like copy-paste; verify against API payloads
    # before changing, as response extraction depends on these names.
    def __init__(self, *args, entity_name='application', entity_collection='applications',
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/referrer_filters'
class ApplicationKeys(DefaultClient):
    """Client for keys of an application."""

    # NOTE(review): entity_name/entity_collection say 'application(s)', not
    # 'key(s)' — looks like copy-paste; verify against API payloads before
    # changing, as response extraction depends on these names.
    def __init__(self, *args, entity_name='application', entity_collection='applications',
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/keys'
class Providers(DefaultClient):
    """Client for provider users."""

    def __init__(self, *args, entity_name='user', entity_collection='users', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/providers'

    def create_token(self, entity_id: int, params, **kwargs):
        """Create an access token for the given provider user."""
        log.info(self._log_message("[TOKEN] Create token",
                                   entity_id=entity_id, body=params, **kwargs))
        url = self._entity_url(entity_id=entity_id) + '/access_tokens'
        # NOTE(review): uses PUT for a create operation — confirm against the
        # 3scale API reference; kwargs are also not forwarded here.
        response = self.rest.put(url, json=params)
        return utils.extract_response(response=response)
class AccessTokens(DefaultClient):
    """Client for personal access tokens."""

    def __init__(self, *args, entity_name='access_token', entity_collection='access_tokens',
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/personal/access_tokens'
class ActiveDocs(DefaultClient):
    """Client for ActiveDocs (API documentation specs)."""

    def __init__(self, *args, entity_name='api_doc', entity_collection='api_docs', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/active_docs'
class Analytics(DefaultClient):
    """Client for the analytics (usage statistics) API."""

    def _list_by_resource(self, resource_id: int, resource_type, metric_name: str = 'hits',
                          since=None, period: str = 'year', **kwargs):
        """Common implementation: usage stats for one resource of a given type."""
        log.info("List analytics by %s (%s) for metric (#%s)", resource_type, resource_id,
                 metric_name)
        params = dict(
            metric_name=metric_name,
            since=since,
            period=period,
            **kwargs
        )
        # NOTE(review): parameters are sent as a JSON body on a GET request —
        # presumably the stats endpoint accepts this; confirm.
        url = self.threescale_client.url + f"/stats/{resource_type}/{resource_id}/usage"
        response = self.rest.get(url, json=params)
        return utils.extract_response(response=response)

    def list_by_application(self, application: Union['Application', int], **kwargs):
        """Usage stats for one application (resource or numeric id)."""
        app_id = _extract_entity_id(application)
        return self._list_by_resource(resource_id=app_id, resource_type='applications', **kwargs)

    def list_by_service(self, service: Union['Service', int], **kwargs):
        """Usage stats for one service (resource or numeric id)."""
        app_id = _extract_entity_id(service)
        return self._list_by_resource(resource_id=app_id, resource_type='services', **kwargs)

    def list_by_backend(self, backend: Union['Backend', int], **kwargs):
        """Usage stats for one backend (resource or numeric id)."""
        backend_id = _extract_entity_id(backend)
        return self._list_by_resource(
            resource_id=backend_id, resource_type='backend_apis', **kwargs)
class Tenants(DefaultClient):
    """Client for tenants (providers of the master API)."""

    def __init__(self, *args, entity_name='tenant', entity_collection='tenants', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    def read(self, entity_id, **kwargs):
        """Read a single tenant by id."""
        log.debug(self._log_message("[GET] Read Tenant", args=kwargs))
        url = self._entity_url(entity_id=entity_id)
        response = self.rest.get(url=url, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    @property
    def url(self) -> str:
        return self.threescale_client.master_api_url + '/providers'

    def trigger_billing(self, tenant: Union['Tenant', int], date: str) -> bool:
        """Trigger billing for whole tenant

        Args:
            tenant: Tenant id or tenant resource
            date: Date for billing

        Returns(bool): True if successful
        """
        provider_id = _extract_entity_id(tenant)
        url = self.url + f"/{provider_id}/billing_jobs"
        params = dict(date=date)
        response = self.rest.post(url=url, json=params)
        return response.ok

    # FIX: return annotation was `-> dict`, but the method returns
    # response.ok (bool), exactly as its docstring states.
    def trigger_billing_account(self, tenant: Union['Tenant', int], account: Union['Account', int],
                                date: str) -> bool:
        """Trigger billing for one account in tenant

        Args:
            tenant: Tenant id or tenant resource
            account: Account id or account resource
            date: Date for billing

        Returns(bool): True if successful
        """
        account_id = _extract_entity_id(account)
        provider_id = _extract_entity_id(tenant)
        url = self.url + f"/{provider_id}/accounts/{account_id}/billing_jobs"
        params = dict(date=date)
        response = self.rest.post(url=url, json=params)
        return response.ok
class Proxies(DefaultClient):
    """Client for the (singleton) proxy configuration of a service."""

    def __init__(self, *args, entity_name='proxy', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/proxy'

    def deploy(self) -> 'Proxy':
        """Deploy the proxy configuration to the staging environment."""
        log.info("[DEPLOY] %s to Staging", self._entity_name)
        url = f'{self.url}/deploy'
        response = self.rest.post(url)
        instance = self._create_instance(response=response)
        return instance

    @property
    def oidc(self) -> 'OIDCConfigs':
        # Sub-client for the OIDC configuration of this proxy
        return OIDCConfigs(self)

    @property
    def mapping_rules(self) -> 'MappingRules':
        # Sub-client for mapping rules of this proxy
        return MappingRules(parent=self, instance_klass=MappingRule)
class ProxyConfigs(DefaultClient):
    """Client for versioned proxy configurations of a service."""

    def __init__(self, *args, entity_name='proxy_config', entity_collection='configs',
                 env: str = None, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)
        # Optional environment ('sandbox'/'production') this client is scoped to
        self._env = env

    @property
    def url(self) -> str:
        base = self.parent.url + '/configs'
        return base if not self._env else f"{base}/{self._env}"

    @property
    def proxy(self) -> 'Proxy':
        return self.parent

    @property
    def service(self) -> 'Service':
        return self.proxy.service

    # tests/integration/test_integration_services.py::test_service_list_configs
    # defines usage in a form proxy.configs.list(env='staging').
    # To reflect this (good tests are considered immutable and defining the behavior)
    # list method has to be customized
    def list(self, **kwargs):
        if "env" in kwargs:
            # FIX: pop() instead of the read-then-del pair
            self._env = kwargs.pop("env")
        return super().list(**kwargs)

    def promote(self, version: int = 1, from_env: str = 'sandbox', to_env: str = 'production',
                **kwargs) -> 'Proxy':
        """Promote a configuration version from one environment to another."""
        log.info("[PROMOTE] %s version %s from %s to %s", self.service, version, from_env,
                 to_env)
        url = f'{self.url}/{from_env}/{version}/promote'
        params = dict(to=to_env)
        # FIX: removed the no-op `kwargs.update()` call
        response = self.rest.post(url, json=params, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def latest(self, env: str = "sandbox") -> 'ProxyConfig':
        """Fetch the most recent configuration of the given environment."""
        log.info("[LATEST] Get latest proxy configuration of %s", env)
        self._env = env
        url = self.url + '/latest'
        response = self.rest.get(url=url)
        instance = self._create_instance(response=response)
        return instance

    def version(self, version: int = 1, env: str = "sandbox") -> 'ProxyConfig':
        """Fetch a specific configuration version of the given environment."""
        log.info("[VERSION] Get proxy configuration of %s of version %s", env, version)
        self._env = env
        url = f'{self.url}/{version}'
        response = self.rest.get(url=url)
        instance = self._create_instance(response=response)
        return instance
class SettingsClient(DefaultClient):
    """Client for account settings."""

    def __init__(self, *args, entity_name='settings', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/settings'
class AdminPortalAuthProviders(DefaultClient):
    """Client for admin-portal authentication providers."""

    def __init__(self, *args, entity_name='authentication_provider',
                 entity_collection='authentication_providers', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/account/authentication_providers'
class UserPermissionsClient(DefaultClient):
    """Client for user permissions."""

    def __init__(self, *args, entity_name='permissions', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)

    @property
    def url(self) -> str:
        # NOTE(review): endpoint is '/tenants', not '/permissions' — confirm
        # against the 3scale API reference before relying on it.
        return self.threescale_client.admin_api_url + '/tenants'
class Policies(DefaultClient):
    """Client for the policy chain of a service proxy."""

    def __init__(self, *args, entity_name='policy', entity_collection='policies', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return f"{self.parent.url}/{self._entity_collection}"

    def append(self, *policies):
        """Append the given policies to the end of the existing chain."""
        # Read-modify-write: fetch current chain, extend, push back
        params = self.list().entity
        params["policies_config"].extend(policies)
        params["service_id"] = self.parent["service_id"]
        return self.update(params=params)

    def insert(self, index: int, *policies):
        """Insert the given policies into the chain starting at `index`."""
        # Read-modify-write: fetch current chain, insert in order, push back
        params = self.list().entity
        for (i, policy) in enumerate(policies):
            params["policies_config"].insert(index + i, policy)
        params["service_id"] = self.parent["service_id"]
        return self.update(params=params)
class OIDCConfigs(DefaultClient):
    """Client for the OIDC configuration of a proxy (singleton resource)."""

    @property
    def url(self) -> str:
        return self.parent.url + '/oidc_configuration'

    def update(self, params: dict = None, **kwargs) -> dict:
        """Patch the OIDC configuration; returns the raw JSON payload."""
        return self.rest.patch(url=self.url, json=params, **kwargs).json()

    def read(self, params: dict = None, **kwargs) -> dict:
        """Read the OIDC configuration; returns the raw JSON payload."""
        return self.rest.get(url=self.url, json=params, **kwargs).json()
class Backends(DefaultPaginationClient):
    """Client for backend APIs."""

    def __init__(self, *args, entity_name='backend_api',
                 entity_collection='backend_apis', per_page=500, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/backend_apis'
class BackendMetrics(Metrics):
    """Metrics client scoped to a backend (larger default page size)."""

    def __init__(self, *args, entity_name='metric', entity_collection='metrics', per_page=500,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page,
                         **kwargs)
class BackendMappingRules(MappingRules):
    """Mapping-rules client scoped to a backend (larger default page size)."""

    def __init__(self, *args, entity_name='mapping_rule',
                 entity_collection='mapping_rules', per_page=500, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page,
                         **kwargs)
class BackendUsages(Services):
    """Client for backend usages of a service."""

    # NOTE(review): inherits Services (pagination defaults), not a plain
    # client — presumably intentional for per_page handling; confirm.
    def __init__(self, *args, entity_name='backend_usage',
                 entity_collection='backend_usages', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/backend_usages'
class PoliciesRegistry(DefaultClient):
    """Client for the custom-policies registry."""

    def __init__(self, *args, entity_name='policy', entity_collection='policies', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/registry/policies'
class ProviderAccounts(DefaultClient):
    """
    Client for the current Provider Account (Tenant).

    3scale endpoints implement only GET and UPDATE methods
    """

    def __init__(self, *args, entity_name='account', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/provider'

    def fetch(self, **kwargs) -> DefaultResource:
        """
        Fetch the current Provider Account (Tenant) entity from admin_api_url endpoint.
        Only one Provider Account (currently used Tenant) is reachable via admin_api_url,
        therefore `entity_id` is not required.
        """
        log.debug(self._log_message("[FETCH] Fetch Current Provider Account (Tenant) ",
                                    args=kwargs))
        response = self.rest.get(url=self.url, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def update(self, params: dict = None, **kwargs) -> 'DefaultResource':
        """Update the current Provider Account (no entity_id required)."""
        # FIX: **kwargs were silently dropped before; forward them to super()
        return super().update(params=params, **kwargs)
class ProviderAccountUsers(DefaultStateClient):
    """
    Client for Provider Accounts.
    In 3scale, entity under Account Settings > Users
    """

    def __init__(self, *args, entity_name='user', entity_collection='users', per_page=None,
                 **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/users'

    # FIX: the annotations were the literal `[]` (an empty-list *instance*
    # used as a type annotation); `list` is the intended type.
    def permissions_update(self, entity_id: int,
                           allowed_services: list = None, allowed_sections: list = None,
                           **kwargs):
        """Update permissions (allowed services/sections) of the user.

        Args:
            entity_id(int): User id
            allowed_services(list): Service ids the user may access
            allowed_sections(list): Admin-portal sections the user may access
        """
        # The API expects a literal '[]' entry to mean "none"
        allowed_services = allowed_services if allowed_services else ['[]']
        allowed_sections = allowed_sections if allowed_sections else ['[]']

        log.info(self._log_message("Change of Provider Account (User) permissions"))
        url = self._entity_url(entity_id) + '/permissions'
        params = {
            'allowed_service_ids[]': allowed_services,
            'allowed_sections[]': allowed_sections,
        }
        response = self.rest.put(url=url, data=params, **kwargs)
        return response.json()

    def allow_all_sections(self, entity_id: int, **kwargs):
        """Grant the user access to every admin-portal section."""
        log.info(self._log_message("Change of Provider Account (User) "
                                   "permissions to all available permissions"))
        return self.permissions_update(entity_id=entity_id, allowed_sections=[
            'portal', 'finance', 'settings', 'partners', 'monitoring', 'plans', 'policy_registry'
        ])

    def permissions_read(self, entity_id: int, **kwargs):
        """Read permissions of the user; returns the raw JSON payload."""
        url = self._entity_url(entity_id) + '/permissions'
        response = self.rest.get(url=url, **kwargs)
        return response.json()

    def set_role_member(self, entity_id: int):
        log.info("Changes the role of the user of the provider account to member")
        return self.set_state(entity_id, state='member')

    def set_role_admin(self, entity_id: int):
        log.info("Changes the role of the provider account to admin")
        return self.set_state(entity_id, state='admin')

    def suspend(self, entity_id):
        log.info("Changes the state of the user of the provider account to suspended")
        return self.set_state(entity_id, state='suspend')

    def unsuspend(self, entity_id: int):
        log.info("Revokes the suspension of a user of the provider account")
        return self.set_state(entity_id, state='unsuspend')

    def activate(self, entity_id: int):
        log.info("Changes the state of the user of the provider account to active")
        return self.set_state(entity_id, state='activate')
class Webhooks(DefaultClient):
    """
    Default client for webhooks
    """

    def __init__(self, *args, entity_name='webhook', entity_collection='webhooks', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/webhooks'

    def update(self, params: dict = None, **kwargs):
        return self.rest.put(url=self.url, json=params, **kwargs)

    def setup(self, webhook_type, url):
        """
        Configure webhooks for given webhooks type
        """
        # Event flags enabled per webhook type (table-driven instead of if/elif)
        flags_by_type = {
            "Keys": (
                "application_key_created_on",
                "application_key_deleted_on",
                "application_key_updated_on",
            ),
            "Users": (
                "user_created_on",
                "user_updated_on",
                "user_deleted_on",
            ),
            "Applications": (
                "application_created_on",
                "application_updated_on",
                "application_suspended_on",
                "application_plan_changed_on",
                "application_user_key_updated_on",
                "application_deleted_on",
            ),
            "Accounts": (
                "account_created_on",
                "account_updated_on",
                "account_deleted_on",
                "account_plan_changed_on",
            ),
        }
        params = {"url": url,
                  "active": "true",
                  "provider_actions": "true"}
        for flag in flags_by_type.get(webhook_type, ()):
            params[flag] = "true"
        return self.update(params=params)

    def clear(self):
        """
        Configure webhooks to default settings
        """
        event_flags = (
            "account_created_on",
            "account_updated_on",
            "account_deleted_on",
            "user_created_on",
            "user_updated_on",
            "user_deleted_on",
            "application_created_on",
            "application_updated_on",
            "application_deleted_on",
            "account_plan_changed_on",
            "application_plan_changed_on",
            "application_user_key_updated_on",
            "application_key_created_on",
            "application_key_deleted_on",
            "application_suspended_on",
            "application_key_updated_on",
        )
        params = {"url": "",
                  "active": "false",
                  "provider_actions": "false"}
        params.update({flag: "false" for flag in event_flags})
        return self.update(params=params)
class LineItems(DefaultClient):
    """Default client for LineItems"""

    def __init__(self, *args, entity_name='line_item', entity_collection='line_items', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/line_items'
class InvoiceState(Enum):
    """Possible states of an invoice; values match the API's string form."""
    CANCELLED = "cancelled"
    FAILED = "failed"
    PAID = "paid"
    UNPAID = "unpaid"
    PENDING = "pending"
    FINALIZED = "finalized"
    OPEN = "open"
class Invoices(DefaultPaginationClient):
    """Default client for Invoices"""

    def __init__(self, *args, entity_name='invoice', entity_collection='invoices',
                 per_page=20, **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, per_page=per_page, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.url + '/api/invoices'

    @property
    def line_items(self) -> LineItems:
        # Sub-client for line items of an invoice
        return LineItems(parent=self, instance_klass=LineItem)

    def list_by_account(self, account: Union['Account', int], **kwargs):
        """List all invoices of the given account (resource or numeric id)."""
        account_id = _extract_entity_id(account)
        url = self.threescale_client.url + f"/api/accounts/{account_id}/invoices"
        response = self.rest.get(url, **kwargs)
        instance = self._create_instance(response=response, collection=True)
        return instance

    def read_by_account(self, entity_id: int, account: Union['Account', int], **kwargs):
        """Read one invoice of the given account."""
        account_id = _extract_entity_id(account)
        url = self.threescale_client.url + f"/api/accounts/{account_id}/invoices/{entity_id}"
        response = self.rest.get(url, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def state_update(self, entity_id: int, state: InvoiceState, **kwargs):
        """
        Update the state of the Invoice.
        Values allowed (depend on the previous state):
        cancelled, failed, paid, unpaid, pending, finalized
        """
        log.info("[Invoice] state changed for invoice (%s): %s", entity_id, state)
        params = dict(state=state.value)
        url = self._entity_url(entity_id) + '/state'
        response = self.rest.put(url=url, json=params, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def charge(self, entity_id: int):
        """Charge an Invoice."""
        log.info("[Invoice] charge invoice (%s)", entity_id)
        url = self._entity_url(entity_id) + '/charge'
        response = self.rest.post(url)
        instance = self._create_instance(response=response)
        return instance
class PaymentTransactions(DefaultClient):
    """Client for payment transactions, nested under a parent resource's URL."""

    def __init__(self, *args, entity_name='payment_transaction',
                 entity_collection='payment_transactions', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.parent.url + '/payment_transactions'
class FieldsDefinitions(DefaultClient):
    """Client for fields definitions (admin API endpoint)."""

    def __init__(self, *args, entity_name='fields_definition',
                 entity_collection='fields_definitions', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/fields_definitions'
class CmsClient(DefaultPaginationClient):
    """ Client for all cms api endpoints. """

    def __init__(self, *args, per_page=100, **kwargs):
        super().__init__(*args, per_page=per_page, **kwargs)

    def _extract_resource(self, response, collection) -> Union[List, Dict]:
        # CMS endpoints wrap the payload under the collection key; unwrap when present.
        extracted = response.json()
        if self._entity_collection and self._entity_collection in extracted:
            extracted = extracted.get(self._entity_collection)
        return extracted

    def select_by(self, **params):
        """Select by params - logical "and" Usage example: select_by(role='admin')
        Filtering by some params can be done on the backend.
        Filters for each class are stored in class variable FILTERS.
        Filters are removed because they are not part of function "predicate".
        -------------------------------------
        | Endpoint         | Filters        |
        -------------------------------------
        | Sections #index | parent_id       |
        | Files #index    | section_id      |
        | Templates #index | type, section_id |
        -------------------------------------
        Args:
            **params: params used for selection
        Returns: List of resources
        """
        log.debug("[SELECT] By params: %s", params)
        # Split backend-filterable params (per-class FILTERS) from the rest;
        # whatever remains is matched client-side by `predicate`.
        filters = {fil: params.pop(fil) for fil in self.FILTERS if fil in params}

        def predicate(item):
            for (key, val) in params.items():
                if item[key] != val:
                    return False
            return True
        if filters:
            return self.select(predicate=predicate, params=filters)
        return self.select(predicate=predicate)
class CmsFiles(CmsClient):
    """ Client for files. """
    # BUG FIX: per the filter table documented on CmsClient.select_by
    # (Files #index -> section_id), the backend filter for files is
    # 'section_id'; it was swapped with CmsSections' 'parent_id'.
    # Also moved the docstring before FILTERS so it actually becomes __doc__.
    FILTERS = ['section_id']

    def __init__(self, *args, entity_name='file', entity_collection='collection', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/cms/files'
class CmsSections(CmsClient):
    """ Client for sections. """
    # BUG FIX: per the filter table documented on CmsClient.select_by
    # (Sections #index -> parent_id), the backend filter for sections is
    # 'parent_id'; it was swapped with CmsFiles' 'section_id'.
    # Also moved the docstring before FILTERS so it actually becomes __doc__.
    FILTERS = ['parent_id']

    def __init__(self, *args, entity_name='section', entity_collection='collection', **kwargs):
        super().__init__(*args, entity_name=entity_name,
                         entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/cms/sections'
class CmsTemplates(CmsClient):
    """ Client for templates. """
    # Docstring moved above FILTERS so it becomes the class __doc__.
    FILTERS = ['type']  # , 'section_id']

    def __init__(self, *args, entity_collection='collection', **kwargs):
        super().__init__(*args, entity_collection=entity_collection, **kwargs)

    @property
    def url(self) -> str:
        return self.threescale_client.admin_api_url + '/cms/templates'

    def publish(self, entity_id, **kwargs):
        """ Publish template with entity_id """
        log.info("[PUBLISH] %s", entity_id)
        url = self._entity_url(entity_id) + '/publish'
        response = self.rest.put(url=url, **kwargs)
        instance = self._create_instance(response=response)
        return instance

    def list(self, **kwargs) -> List['DefaultResource']:
        """List all entities
        Args:
            **kwargs: Optional parameters
        Returns(List['DefaultResource']): List of resources
        """
        log.info(self._log_message("[LIST] List", args=kwargs))
        # Always request full template content and restrict to this client's type.
        kwargs.setdefault("params", {})
        kwargs["params"].setdefault("content", "true")
        kwargs["params"].setdefault("type", self._entity_name)
        instance = self._list(**kwargs)
        return instance

    def select(self, predicate, **kwargs) -> List['DefaultResource']:
        """Select resources based on the predicate
        Args:
            predicate: Predicate
            **kwargs: Optional args
        Returns: List of resources
        """
        kwargs.setdefault("params", {})
        kwargs["params"].setdefault("content", "true")
        kwargs["params"].setdefault("type", self._entity_name)
        return [item for item in self._list(**kwargs) if predicate(item)]

    def create(self, params: dict = None,
               *args, **kwargs) -> 'DefaultResource':
        """Create a template; forces 'type' to this client's entity name.

        BUG FIX: the original dereferenced ``params`` unconditionally, so the
        documented default ``params=None`` raised AttributeError. Now a missing
        dict is treated as empty.
        """
        if params is None:
            params = {}
        params.update({'type': self._entity_name})
        return super().create(params=params, *args, **kwargs)
class CmsPages(CmsTemplates):
    """ Client for pages (templates with type='page'). """

    def __init__(self, *args, entity_name='page', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)
class CmsBuiltinPages(CmsTemplates):
    """ Client for builtin pages (templates with type='builtin_page'). """

    def __init__(self, *args, entity_name='builtin_page', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)
class CmsLayouts(CmsTemplates):
    """ Client for layouts (templates with type='layout'). """

    def __init__(self, *args, entity_name='layout', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)
class CmsPartials(CmsTemplates):
    """ Client for partials (templates with type='partial'). """

    def __init__(self, *args, entity_name='partial', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)
class CmsBuiltinPartials(CmsTemplates):
    """ Client for builtin partials (templates with type='builtin_partial'). """

    def __init__(self, *args, entity_name='builtin_partial', **kwargs):
        super().__init__(*args, entity_name=entity_name, **kwargs)
# Resources
class ApplicationPlan(DefaultPlanResource):
    """Resource for an application plan; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def plans_url(self) -> str:
        # Admin-API URL of this specific plan (not the service-scoped listing URL).
        return self.threescale_client.admin_api_url + f"/application_plans/{self.entity_id}"

    @property
    def service(self) -> 'Service':
        return self.parent

    def limits(self, metric: 'Metric' = None) -> 'Limits':
        """Client for usage limits of this plan, optionally scoped to one metric."""
        return Limits(self, metric=metric, instance_klass=Limit)

    def pricing_rules(self, metric: 'Metric' = None) -> 'PricingRules':
        """Client for pricing rules of this plan, optionally scoped to one metric."""
        return PricingRules(self, metric=metric, instance_klass=PricingRule)
class Method(DefaultResource):
    """Resource for a method of a metric; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def metric(self) -> 'Metric':
        return self.parent

    @property
    def service(self) -> 'Service':
        # Grandparent: method -> metric -> service.
        return self.metric.parent
class Metric(DefaultResource):
    """Resource for a service metric; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def service(self) -> 'Service':
        return self.parent

    @property
    def methods(self) -> 'Methods':
        # Propagate the owning client's pagination size to the nested client.
        return Methods(parent=self, instance_klass=Method, per_page=self.client.per_page)
class MappingRule(DefaultResource):
    """Resource for a proxy mapping rule."""

    @property
    def proxy(self) -> 'Proxy':
        return self.parent

    @property
    def service(self) -> 'Service':
        return self.proxy.service
class ProxyConfig(DefaultResource):
    """Resource for a proxy configuration version."""

    @property
    def proxy(self) -> 'Proxy':
        return self.parent

    @property
    def service(self) -> 'Service':
        return self.proxy.service

    # ProxyConfig is once instantiated with just proxy config obj (for example
    # through promote()) other times as dict of key "proxy_configs". This seems
    # to be clear bug in the code (this code) and behavior should be always
    # consistent. For now keeping inconsistency as it introduces minimal change
    # and keeps everything working
    def __getitem__(self, key):
        # Unwrap the "proxy_configs" envelope when present (see note above).
        if "proxy_configs" in self.entity:
            return self.entity["proxy_configs"][key]
        return super().__getitem__(key)

    # Same problem as in __getitem__.
    def __len__(self):
        if "proxy_configs" in self.entity:
            return len(self.entity["proxy_configs"])
        return super().__len__()
class Policy(DefaultResource):
    """Resource for a proxy policy; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def proxy(self) -> 'Proxy':
        return self.parent

    @property
    def service(self) -> 'Service':
        return self.proxy.service
class Proxy(DefaultResource):
    """Resource for a service's proxy (APIcast) configuration."""

    @property
    def url(self) -> str:
        return self.client.url

    @property
    def service(self) -> 'Service':
        return self.parent

    @property
    def mapping_rules(self) -> MappingRules:
        return MappingRules(parent=self, instance_klass=MappingRule)

    @property
    def configs(self) -> 'ProxyConfigs':
        return ProxyConfigs(parent=self, instance_klass=ProxyConfig)

    @property
    def policies(self) -> 'Policies':
        return Policies(parent=self, instance_klass=Policy)

    def promote(self, **kwargs) -> 'Proxy':
        """Promote the proxy configuration (delegates to the configs client)."""
        return self.configs.promote(**kwargs)

    @property
    def policies_registry(self) -> PoliciesRegistry:
        return PoliciesRegistry(parent=self, instance_klass=PolicyRegistry)

    def deploy(self) -> 'Proxy':
        """Deploy the proxy configuration (delegates to the client)."""
        return self.client.deploy()
class Service(DefaultResource):
    """Resource for a 3scale service; identified by 'system_name'."""

    # Authentication modes as stored in the service's "backend_version" field.
    AUTH_USER_KEY = "1"
    AUTH_APP_ID_KEY = "2"
    AUTH_OIDC = "oidc"

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def app_plans(self) -> ApplicationPlans:
        return ApplicationPlans(parent=self, instance_klass=ApplicationPlan)

    @property
    def metrics(self) -> Metrics:
        return Metrics(parent=self, instance_klass=Metric)

    @property
    def proxy(self) -> 'Proxies':
        return Proxies(parent=self, instance_klass=Proxy)

    @property
    def mapping_rules(self) -> 'MappingRules':
        return self.proxy.mapping_rules

    @property
    def policies_registry(self) -> 'PoliciesRegistry':
        # BUG FIX: instance_klass must be the resource class PolicyRegistry,
        # not the client class PoliciesRegistry; matches Proxy.policies_registry.
        return PoliciesRegistry(parent=self, instance_klass=PolicyRegistry)

    def oidc(self):
        return self.proxy.oidc

    @property
    def backend_usages(self) -> 'BackendUsages':
        return BackendUsages(parent=self, instance_klass=BackendUsage)

    @property
    def active_docs(self) -> 'ActiveDocs':
        """ Active docs related to service. """
        up_self = self

        class Wrap(ActiveDocs):
            def list(self, **kwargs) -> List['DefaultResource']:
                """List all ActiveDocs related to this service."""
                # Force the filter to this service's id (closure over up_self).
                kwargs.update({'service_id': up_self['id']})
                instance = self.select_by(**kwargs)
                return instance
        return Wrap(parent=self, instance_klass=ActiveDoc)
class ActiveDoc(DefaultResource):
    """Resource for an ActiveDocs spec; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class Provider(DefaultResource):
    """Resource for a provider; identified by 'org_name'."""

    def __init__(self, entity_name='org_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class AccessToken(DefaultResource):
    """Resource for an access token; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class Tenant(DefaultResource):
    """Resource for a tenant; caches admin URL/token from the signup payload."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
        self.admin_base_url = self["signup"]["account"]["admin_base_url"]
        # Token is optional in the signup payload.
        self.admin_token = None
        if "access_token" in self['signup']:
            self.admin_token = self["signup"]["access_token"]["value"]

    @property
    def entity_id(self) -> int:
        # The tenant's id is nested inside the signup/account structure.
        return self.entity["signup"]["account"]["id"]

    def wait_tenant_ready(self) -> bool:
        """
        When True is returned, there is some chance the tenant is actually ready.
        """
        # Ignore ssl, this is about checking whether the initialization has
        # been finished.
        return self.admin_api(ssl_verify=False).wait_for_tenant()

    def admin_api(self, ssl_verify=True, wait=-1) -> 'client.ThreeScaleClient':
        """
        Returns admin api client for tenant.
        It's strongly recommended to call this with wait=True
        """
        return client.ThreeScaleClient(
            self.admin_base_url, self.admin_token, ssl_verify=ssl_verify, wait=wait)

    def trigger_billing(self, date: str):
        """Trigger billing for whole tenant
        Args:
            date: Date for billing
        Returns(bool): True if successful
        """
        return self.threescale_client.tenants.trigger_billing(self, date)

    def trigger_billing_account(self, account: Union['Account', int], date: str) -> dict:
        """Trigger billing for one account in tenant
        Args:
            account: Account id or account resource
            date: Date for billing
        Returns(bool): True if successful
        """
        return self.threescale_client.tenants.trigger_billing_account(self, account, date)

    def plan_upgrade(self, plan_id):
        """Upgrade plan to given plan_id"""
        return self.client.rest.put(f"{self.url}/plan_upgrade", params={"plan_id": plan_id})

    @property
    def account(self):
        """Return account of this tenant"""
        return Account(
            client=self.threescale_client.accounts,
            entity=self.entity["signup"]["account"]
        )
class Application(DefaultResource):
    """Resource for an application; wires request-authentication helpers."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
        # Maps the service's "backend_version" value to an auth object factory.
        self._auth_objects = {
            Service.AUTH_USER_KEY: auth.UserKeyAuth,
            Service.AUTH_APP_ID_KEY: auth.AppIdKeyAuth
        }
        # Tri-state default for SSL verification (None = library default).
        self._api_client_verify = None
        self._client_factory = utils.HttpClient

    @property
    def account(self) -> 'Account':
        return self.parent

    @property
    def service(self) -> 'Service':
        "The service to which this application is bound"
        return self.threescale_client.services[self["service_id"]]

    @property
    def keys(self):
        "Application keys"
        return ApplicationKeys(parent=self, instance_klass=DefaultResource)

    def authobj(self, auth_mode=None, location=None):
        """Returns subclass of requests.auth.BaseAuth to provide authentication
        for queries against 3scale service"""
        svc = self.service
        # Default to the service's configured backend_version.
        auth_mode = auth_mode if auth_mode else svc["backend_version"]
        if auth_mode not in self._auth_objects:
            raise errors.ThreeScaleApiError(f"Unknown credentials for configuration {auth_mode}")
        return self._auth_objects[auth_mode](self, location=location)

    def register_auth(self, auth_mode: str, factory):
        """Register a custom auth factory for the given auth_mode."""
        self._auth_objects[auth_mode] = factory

    def api_client(self, endpoint: str = "sandbox_endpoint", verify: bool = None, cert=None,
                   disable_retry_status_list: Iterable = ()) -> 'utils.HttpClient':
        """This is preconfigured client for the application to run api calls.
        To avoid failures due to delays in infrastructure it retries call
        in case of certain condition. To modify this behavior customized session
        has to be passed. This custom session should have configured all necessary
        (e.g. authentication)
        :param endpoint: Choose whether 'sandbox_endpoint' or 'endpoint',
        defaults to sandbox_endpoint
        :param verify: Whether to do ssl verification or not,
        by default doesn't change what's in session, defaults to None
        :param cert: path to certificates
        :param disable_retry_status_list: Iterable collection that represents status codes
        that should not be retried.
        :return: threescale.utils.HttpClient
        Instance property api_client_verify of Application can change default of verify param
        to avoid passing non-default value to multiple api_client calls. It is applied whenever
        verify param is kept unchanged (None).
        """
        if verify is None:
            verify = self.api_client_verify
        return self._client_factory(self, endpoint, verify, cert, disable_retry_status_list)

    @property
    def api_client_verify(self) -> bool:
        """Allows to change defaults of SSL verification for api_client (and
        test_request); default: None - do not alter library default"""
        return self._api_client_verify

    @api_client_verify.setter
    def api_client_verify(self, value: bool):
        self._api_client_verify = value

    def test_request(self, relpath=None, verify: bool = None):
        """Quick call to do test request against configured service. This is
        equivalent to test request on Integration page from UI
        :param relpath: relative path to run the requests,
        if not set, preconfigured value is used, defaults to None
        :param verify: SSL verification
        :return: requests.Response
        Instance attribute api_client_verify of Application can change default of verify param
        to avoid passing non-default value to multiple test_request calls.
        """
        proxy = self.service.proxy.list().entity
        relpath = relpath if relpath is not None else proxy["api_test_path"]
        client = self.api_client(verify=verify)
        return client.get(relpath)
class Account(DefaultResource):
    """Resource for a developer account; identified by 'org_name'."""

    def __init__(self, entity_name='org_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def applications(self) -> Applications:
        return Applications(parent=self, instance_klass=Application)

    @property
    def users(self) -> AccountUsers:
        return AccountUsers(parent=self, instance_klass=AccountUser)

    def credit_card_set(self, params: dict = None, **kwargs):
        """Set credit-card data for this account; returns the raw response."""
        url = self.url + "/credit_card"
        response = self.client.rest.put(url=url, json=params, **kwargs)
        return response

    def credit_card_delete(self, params: dict = None, **kwargs):
        """Delete credit-card data for this account; returns the raw response."""
        url = self.url + "/credit_card"
        response = self.client.rest.delete(url=url, json=params, **kwargs)
        return response
class UserPermissions(DefaultResource):
    """Resource for user permissions; no behavior beyond the base resource."""
    pass
class AccountUser(DefaultUserResource):
    """Resource for a user of a developer account; identified by 'username'."""

    def __init__(self, entity_name='username', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def account(self) -> 'Account':
        return self.parent

    @property
    def permissions(self) -> 'UserPermissionsClient':
        return UserPermissionsClient(parent=self, instance_klass=UserPermissions)
class AccountPlan(DefaultResource):
    """Resource for an account plan; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class Limit(DefaultResource):
    """Resource for a usage limit of an application plan."""

    @property
    def app_plan(self) -> ApplicationPlan:
        return self.parent
class PricingRule(DefaultResource):
    """Resource for a pricing rule of an application plan."""

    @property
    def app_plan(self) -> ApplicationPlan:
        return self.parent
class Backend(DefaultResource):
    """Resource for a backend; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def metrics(self) -> 'BackendMetrics':
        return BackendMetrics(parent=self, instance_klass=BackendMetric)

    @property
    def mapping_rules(self) -> 'BackendMappingRules':
        return BackendMappingRules(parent=self, instance_klass=BackendMappingRule)

    def usages(self) -> list['BackendUsage']:
        """ Returns list of backend usages where the backend is used."""
        # Scans every service's backend usages; O(services x usages) API calls.
        return [usage for service in self.threescale_client.services.list()
                for usage in service.backend_usages.select_by(backend_id=self['id'])]
class BackendMetric(Metric):
    """Resource for a backend metric; identified by 'system_name'."""

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class BackendMappingRule(MappingRule):
    """Resource for a backend mapping rule; pure pass-through to MappingRule."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
class BackendUsage(DefaultResource):
    """Resource for a service's backend usage link."""

    def __init__(self, entity_name='', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def backend(self) -> 'Backend':
        # Build a standalone Backend resource from the stored backend_id.
        return Backend(
            client=Backends(parent=self, instance_klass=Backend),
            entity_id=self['backend_id'])
def _extract_entity_id(entity: Union['DefaultResource', int]):
    """Accept either a resource object or a plain id; return the numeric id."""
    return entity.entity_id if isinstance(entity, DefaultResource) else entity
class PolicyRegistry(DefaultResource):
    """Resource for a custom-policy registry entry; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def proxy(self) -> 'Proxy':
        return self.parent

    @property
    def service(self) -> 'Service':
        return self.proxy.service
class ProviderAccount(DefaultResource):
    """Resource for the provider account; identified by 'org_name'."""

    def __init__(self, entity_name='org_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    def update(self, params: dict = None, **kwargs) -> 'DefaultResource':
        """Update the provider account, merging params over the current entity.

        Unlike the base update, sends the complete entity back to the API.
        """
        new_params = {**self.entity}
        if params:
            new_params.update(params)
        new_entity = self.client.update(params=new_params, **kwargs)
        self._entity = new_entity.entity
        return self
class ProviderAccountUser(DefaultStateResource):
    """Resource for a provider-account user; identified by 'username'."""

    def __init__(self, entity_name='username', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    def permissions_update(
            self, allowed_services: list = None, allowed_sections: list = None, **kwargs):
        """Update this user's allowed services/sections (delegates to client)."""
        return self.client.permissions_update(
            entity_id=self.entity_id,
            allowed_services=allowed_services,
            allowed_sections=allowed_sections,
            **kwargs
        )

    def allow_all_sections(self, **kwargs):
        """Grant the user access to all admin-portal sections."""
        return self.client.allow_all_sections(entity_id=self.entity_id, **kwargs)

    def permissions_read(self, **kwargs):
        """Read this user's current permissions."""
        return self.client.permissions_read(entity_id=self.entity_id, **kwargs)

    def set_role_member(self):
        log.info("Changes the role of the user of the provider account to member")
        return self.set_state(state='member')

    def set_role_admin(self):
        log.info("Changes the role of the provider account to admin")
        return self.set_state(state='admin')

    def suspend(self):
        log.info("Changes the state of the user of the provider account to suspended")
        return self.set_state(state='suspend')

    def unsuspend(self):
        log.info("Revokes the suspension of a user of the provider account")
        return self.set_state(state='unsuspend')

    def activate(self):
        log.info("Changes the state of the user of the provider account to active")
        return self.set_state(state='activate')
class LineItem(DefaultResource):
    """Resource for an invoice line item; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class Invoice(DefaultResource):
    """Resource for an invoice; identified by 'friendly_id'."""

    def __init__(self, entity_name='friendly_id', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)

    @property
    def line_items(self) -> LineItems:
        return LineItems(parent=self, instance_klass=LineItem)

    def state_update(self, state: InvoiceState):
        """Change this invoice's state (see Invoices.state_update)."""
        return self.client.state_update(entity_id=self.entity_id, state=state)

    def charge(self):
        """Charge this invoice (see Invoices.charge)."""
        return self.client.charge(entity_id=self.entity_id)

    @property
    def payment_transactions(self) -> 'PaymentTransactions':
        return PaymentTransactions(parent=self, instance_klass=PaymentTransaction)
class PaymentTransaction(DefaultResource):
    """Resource for a payment transaction; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class FieldsDefinition(DefaultResource):
    """Resource for a fields definition; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class AdminPortalAuthProvider(DefaultResource):
    """Resource for an admin-portal auth provider; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class DevPortalAuthProvider(DefaultResource):
    """Resource for a developer-portal auth provider; identified by 'name'."""

    def __init__(self, entity_name='name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class CmsFile(DefaultResource):
    """ Resource for file; identified by 'path'. """

    def __init__(self, entity_name='path', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class CmsSection(DefaultResource):
    """ Resource for section; identified by 'id'. """

    def __init__(self, entity_name='id', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class CmsTemplate(DefaultResource):
    """ Resource for templates; adds publishing on top of the base resource. """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def publish(self, **kwargs):
        """ Publish template resource """
        return self.client.publish(entity_id=self.entity_id, **kwargs)
class CmsPage(CmsTemplate):
    """ Resource for page; identified by 'system_name'. """

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class CmsLayout(CmsTemplate):
    """ Resource for layout; identified by 'system_name'. """

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
class CmsPartial(CmsTemplate):
    """ Resource for partials; identified by 'system_name'. """

    def __init__(self, entity_name='system_name', **kwargs):
        super().__init__(entity_name=entity_name, **kwargs)
Download
========
Release for 2.7 and 3.x (last version I tested was 3.4.3): https://pypi.python.org/pypi/3to2
Abstract
========
lib3to2 is a set of fixers that are intended to backport code written for
Python version 3.x into Python version 2.x. The final target 2.x version is
the latest version of the 2.7 branch, as that is the last release in the Python
2.x branch. Some attempts have been made, however, to make code compatible as
much as possible with versions of Python back to 2.5, and bug reports are still
welcome for Python features only present in 2.6+ that are not addressed by
lib3to2.
This project came about as a Google Summer of Code (TM) project in 2009.
Status
======
Because of the nature of the subject matter, 3to2 is not perfect, so check all
output manually. 3to2 does the bulk of the work, but there is code that simply
cannot be converted into a Python 2 equivalent for one reason or another.
3to2 will either produce working Python 2 code or warn about why it did not.
Any other behavior is a bug and should be reported.
lib3to2's fixers are somewhat well-tested individually, but there is no testing
that is done on interactions between multiple fixers, so most of the bugs in
the future will likely be found there.
Intention
=========
lib3to2 is intended to be a tool in the process of developing code that is
backwards-compatible between Python 3 and Python 2. It is not intended to be a
complete solution for directly backporting Python 3 code, though it can often
be used for this purpose without issue. Sufficiently large packages should be
developed with lib3to2 used throughout the process to prevent backwards-
incompatible code from becoming too embedded.
There are some features of Python 3 that have no equivalent in Python 2, and
though lib3to2 tries to fix as many of these as it can, some features are
beyond its grasp. This is especially true of features not readily detectable
by their syntax alone and extremely subtle features, so make sure that code
using lib3to2 is thoroughly tested.
Repository
==========
lib3to2 resides at http://bitbucket.org/amentajo/lib3to2, where the bug tracker
can be found at http://bitbucket.org/amentajo/lib3to2/issues
Usage
=====
Run "./3to2" to convert stdin ("-"), files or directories given as
arguments. By default, the tool outputs a unified diff-formatted patch on
standard output and a "what was changed" summary on standard error, but the
"-w" option can be given to write back converted files, creating
".bak"-named backup files.
If you are root, you can also install with "./setup.py build" and
"./setup.py install" ("make install" does this for you).
This branch of 3to2 must be run with Python 3.
To install locally (used for running tests as a non-privileged user), the
scripts assume you are using python3.1. Modify accordingly if you are not.
Relationship with lib2to3
=========================
Some of the fixers for lib3to2 are directly copy-pasted from their 2to3
equivalent, with the element of PATTERN and the corresponding transformation
switched places. Most fixers written for this program with a corresponding
2to3 fixer started from a clone of the 2to3 fixer, then modifying that fixer to
work in reverse. I do not claim original authorship of these fixers, but I do
claim that they will work for 3to2, independent of how they work for 2to3.
In addition, this program depends on lib2to3 to implement fixers, test cases,
refactoring, and grammar. Some portions of lib2to3 were modified to be more
generic to support lib3to2's calls.
You should use the latest version of lib2to3 from the Python sandbox rather
than the version (if any) that comes with Python. As a convenience,
"two2three" from the Python Package Index is a recent enough version of lib2to3
renamed to avoid conflicts. To use this package, replace all usage of
"lib2to3" with "two2three" within the 3to2 source files after installing
"two2three" from the PyPI. Depending on the developer's mood, a version of
3to2 may be provided with this change already made. | 3to2 | /3to2-1.1.1.zip/3to2-1.1.1/README | README |
from __future__ import print_function
import sys
import os
import difflib
import logging
import shutil
import optparse
from lib2to3 import refactor
from lib2to3 import pygram
def diff_texts(a, b, filename):
    """Return a unified diff of two strings."""
    old_lines = a.splitlines()
    new_lines = b.splitlines()
    return difflib.unified_diff(
        old_lines, new_lines,
        fromfile=filename, tofile=filename,
        fromfiledate="(original)", tofiledate="(refactored)",
        lineterm="")
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
    """
    Prints output to stdout.
    """

    def __init__(self, fixers, options, explicit, nobackups, show_diffs):
        self.nobackups = nobackups
        self.show_diffs = show_diffs
        super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
        # Input is Python 3, so parse with the grammar that has no print statement.
        self.driver.grammar = pygram.python_grammar_no_print_statement

    def refactor_string(self, data, name):
        """Override to keep print statements out of the grammar"""
        try:
            tree = self.driver.parse_string(data)
        except Exception as err:
            self.log_error("Can't parse %s: %s: %s",
                           name, err.__class__.__name__, err)
            return
        self.log_debug("Refactoring %s", name)
        self.refactor_tree(tree, name)
        return tree

    def log_error(self, msg, *args, **kwargs):
        # Record the error (summarize() reports the count) and log it.
        self.errors.append((msg, args, kwargs))
        self.logger.error(msg, *args, **kwargs)

    def write_file(self, new_text, filename, old_text, encoding):
        if not self.nobackups:
            # Make backup
            backup = filename + ".bak"
            if os.path.lexists(backup):
                try:
                    os.remove(backup)
                except os.error as err:
                    self.log_message("Can't remove backup %s", backup)
            try:
                os.rename(filename, backup)
            except os.error as err:
                self.log_message("Can't rename %s to %s", filename, backup)
        # Actually write the new file
        write = super(StdoutRefactoringTool, self).write_file
        write(new_text, filename, old_text, encoding)
        if not self.nobackups:
            # NOTE(review): if the rename above failed, `backup` may not exist
            # and copymode will raise — presumably acceptable here; verify.
            shutil.copymode(backup, filename)

    def print_output(self, old, new, filename, equal):
        if equal:
            self.log_message("No changes to %s", filename)
        else:
            self.log_message("Refactored %s", filename)
            if self.show_diffs:
                for line in diff_texts(old, new, filename):
                    # NOTE(review): printing bytes on Python 3 shows b'...'
                    # repr, not the text itself — confirm intended target.
                    print(line.encode('utf-8', 'ignore'))
def warn(msg):
    """Emit a warning line for *msg* on standard error."""
    print("WARNING: " + str(msg), file=sys.stderr)
def main(fixer_pkg, args=None):
    """Main program.
    Args:
        fixer_pkg: the name of a package where the fixers are located.
        args: optional; a list of command line arguments. If omitted,
              sys.argv[1:] is used.
    Returns a suggested exit status (0, 1, 2).
    """
    # Set up option parser
    parser = optparse.OptionParser(usage="3to2 [options] file|dir ...")
    parser.add_option("-d", "--doctests_only", action="store_true",
                      help="Fix up doctests only")
    parser.add_option("-f", "--fix", action="append", default=[],
                      help="Each FIX specifies a transformation; default: all")
    parser.add_option("-j", "--processes", action="store", default=1,
                      type="int", help="Run 3to2 concurrently")
    parser.add_option("-x", "--nofix", action="append", default=[],
                      help="Prevent a fixer from being run.")
    parser.add_option("-l", "--list-fixes", action="store_true",
                      help="List available transformations (fixes/fix_*.py)")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="More verbose logging")
    parser.add_option("-w", "--write", action="store_true",
                      help="Write back modified files")
    parser.add_option("-n", "--nobackups", action="store_true", default=False,
                      help="Don't write backups for modified files.")
    parser.add_option("--no-diffs", action="store_true",
                      help="Don't show diffs of the refactoring")

    # Parse command line arguments
    refactor_stdin = False
    options, args = parser.parse_args(args)
    if not options.write and options.no_diffs:
        warn("not writing files and not printing diffs; that's not very useful")
    if not options.write and options.nobackups:
        parser.error("Can't use -n without -w")
    if options.list_fixes:
        print("Available transformations for the -f/--fix option:")
        for fixname in refactor.get_all_fix_names(fixer_pkg):
            print(fixname)
        # With -l and no files, listing fixes is the whole job.
        if not args:
            return 0
    if not args:
        print("At least one file or directory argument required.", file=sys.stderr)
        print("Use --help to show usage.", file=sys.stderr)
        return 2
    if "-" in args:
        refactor_stdin = True
        if options.write:
            print("Can't write to stdin.", file=sys.stderr)
            return 2

    # Set up logging handler
    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(name)s: %(message)s', level=level)

    # Initialize the refactoring tool
    avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
    unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
    explicit = set()
    if options.fix:
        all_present = False
        for fix in options.fix:
            if fix == "all":
                all_present = True
            else:
                explicit.add(fixer_pkg + ".fix_" + fix)
        # Explicit fixes are run even if marked as requiring explicit selection.
        requested = avail_fixes.union(explicit) if all_present else explicit
    else:
        requested = avail_fixes.union(explicit)
    fixer_names = requested.difference(unwanted_fixes)
    rt = StdoutRefactoringTool(sorted(fixer_names), None, sorted(explicit),
                               options.nobackups, not options.no_diffs)

    # Refactor all files and directories passed as arguments
    if not rt.errors:
        if refactor_stdin:
            rt.refactor_stdin()
        else:
            try:
                rt.refactor(args, options.write, options.doctests_only,
                            options.processes)
            except refactor.MultiprocessingUnsupported:
                assert options.processes > 1
                print("Sorry, -j isn't supported on this platform.", file=sys.stderr)
                return 1
        rt.summarize()

    # Return error status (0 if rt.errors is zero)
    return int(bool(rt.errors))
from lib2to3.pygram import token, python_symbols as syms
from lib2to3.pytree import Leaf, Node
from lib2to3.fixer_util import *
def Star(prefix=None):
    """Return a new ``*`` leaf carrying the given prefix."""
    star_leaf = Leaf(token.STAR, '*', prefix=prefix)
    return star_leaf
def DoubleStar(prefix=None):
    """Return a new ``**`` leaf carrying the given prefix."""
    dstar_leaf = Leaf(token.DOUBLESTAR, '**', prefix=prefix)
    return dstar_leaf
def Minus(prefix=None):
    """Return a new ``-`` leaf carrying the given prefix."""
    minus_leaf = Leaf(token.MINUS, '-', prefix=prefix)
    return minus_leaf
def commatize(leafs):
    """
    Accepts/turns: (Name, Name, ..., Name, Name)
    Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name)

    Interleaves a fresh Comma() leaf between consecutive elements of
    *leafs*.  An empty input yields an empty list; the previous
    implementation appended a trailing comma and then unconditionally
    deleted it, which raised IndexError when *leafs* was empty.
    """
    new_leafs = []
    for leaf in leafs:
        if new_leafs:
            # Separate from the previous element.
            new_leafs.append(Comma())
        new_leafs.append(leaf)
    return new_leafs
def indentation(node):
    """
    Return the indentation string in effect for *node*.

    Only nodes inside a suite have indentation; anything else yields "".
    """
    # Climb until the immediate parent is a suite (or we run off the root).
    while node.parent is not None and node.parent.type != syms.suite:
        node = node.parent
    if node.parent is None:
        return ""
    # Inside a suite the children are NEWLINE, INDENT, then the statements.
    # The INDENT leaf holds the indentation text; every later sibling
    # carries the same text as its prefix.
    if node.type == token.INDENT:
        return node.value
    prev = node.prev_sibling
    if prev is not None and prev.type == token.INDENT:
        return prev.value
    if prev is None:
        return ""
    return node.prefix
def indentation_step(node):
    """
    Dirty little trick to get the difference between each indentation level.

    Implemented by finding the "least" of all indentation strings in the
    tree (tabs and spaces never get mixed this far, so lexicographic min
    is the shortest one).
    """
    root = find_root(node)
    # Gather every distinct INDENT value in the whole tree.
    indents = set(leaf.value for leaf in root.pre_order()
                  if leaf.type == token.INDENT)
    if indents:
        return min(indents)
    # Nothing is indented anywhere, so we get to pick the convention.
    return " "
def suitify(parent):
    """
    Turn the stuff after the first colon in parent's children
    into a suite, if it wasn't already
    """
    for node in parent.children:
        if node.type == syms.suite:
            # already in the preferred format, do nothing
            return
    # One-liners have no suite node, we have to fake one up
    # NOTE: this loop deliberately leaks both `i` and `node` for use below.
    for i, node in enumerate(parent.children):
        if node.type == token.COLON:
            break
    else:
        raise ValueError("No class suite and no ':'!")
    # Move everything into a suite node.  The INDENT text is the enclosing
    # indentation plus one more step, since the new suite nests one deeper.
    suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))])
    one_node = parent.children[i+1]
    one_node.remove()
    one_node.prefix = ''
    suite.append_child(one_node)
    parent.append_child(suite)
def NameImport(package, as_name=None, prefix=None):
    """
    Build an ``import <package> [as <as_name>]`` statement node.

    *package* is a Name node; *as_name*, when given, is the alias string;
    *prefix* (default "") goes on the leading ``import`` keyword.
    """
    kids = [Name("import", prefix="" if prefix is None else prefix), package]
    if as_name is not None:
        kids.append(Name("as", prefix=" "))
        kids.append(Name(as_name, prefix=" "))
    return Node(syms.import_name, kids)
# Statement types that own a suite in which an import may be nested, and the
# two flavors of import statement; both are consumed by import_binding_scope().
_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt)
_import_stmts = (syms.import_name, syms.import_from)
def import_binding_scope(node):
    """
    Generator yields all nodes for which a node (an import_stmt) has scope
    The purpose of this is for a call to _find() on each of them
    """
    # import_name / import_from are small_stmts
    assert node.type in _import_stmts
    test = node.next_sibling
    # A small_stmt can only be followed by a SEMI or a NEWLINE.
    while test.type == token.SEMI:
        nxt = test.next_sibling
        # A SEMI can only be followed by a small_stmt or a NEWLINE
        if nxt.type == token.NEWLINE:
            break
        else:
            yield nxt
        # A small_stmt can only be followed by either a SEMI or a NEWLINE
        test = nxt.next_sibling
    # Covered all subsequent small_stmts after the import_stmt
    # Now to cover all subsequent stmts after the parent simple_stmt
    parent = node.parent
    assert parent.type == syms.simple_stmt
    test = parent.next_sibling
    while test is not None:
        # Yes, this will yield NEWLINE and DEDENT. Deal with it.
        yield test
        test = test.next_sibling
    context = parent.parent
    # Recursively yield nodes following imports inside of a if/while/for/try/with statement
    if context.type in _compound_stmts:
        # import is in a one-liner
        c = context
        while c.next_sibling is not None:
            yield c.next_sibling
            c = c.next_sibling
        context = context.parent
    # Can't chain one-liners on one line, so that takes care of that.
    p = context.parent
    if p is None:
        return
    # in a multi-line suite
    # Walk upward: yield each enclosing suite that follows, moving to the
    # grandparent whenever the siblings at the current level run out.
    while p.type in _compound_stmts:
        if context.type == syms.suite:
            yield context
        context = context.next_sibling
        if context is None:
            context = p.parent
            p = context.parent
            if p is None:
                break
def ImportAsName(name, as_name, prefix=None):
    """Build an import_as_name node: ``<name> as <as_name>``."""
    node = Node(syms.import_as_name,
                [Name(name), Name("as", prefix=" "), Name(as_name, prefix=" ")])
    if prefix is not None:
        node.prefix = prefix
    return node
def future_import(feature, node):
    """
    Insert ``from __future__ import <feature>`` at the top of the tree
    containing *node*, just below any module docstring, unless the
    feature is already imported.
    """
    root = find_root(node)
    if does_tree_import("__future__", feature, node):
        return  # already imported; nothing to do
    # Skip past a leading module docstring, if there is one.
    insert_pos = 0
    for idx, child in enumerate(root.children):
        if (child.type == syms.simple_stmt and child.children
                and child.children[0].type == token.STRING):
            insert_pos = idx + 1
            break
    # Steal the prefix (comments/whitespace) of the first real statement so
    # the new import lands above it; skip over bare NEWLINE nodes.
    for thing_after in root.children[insert_pos:]:
        if thing_after.type == token.NEWLINE:
            insert_pos += 1
            continue
        prefix = thing_after.prefix
        thing_after.prefix = ""
        break
    else:
        prefix = ""
    import_stmt = FromImport("__future__",
                             [Leaf(token.NAME, feature, prefix=" ")])
    root.insert_child(insert_pos,
                      Node(syms.simple_stmt, [import_stmt, Newline()],
                           prefix=prefix))
def parse_args(arglist, scheme):
    """
    Parse a list of argument nodes into a dict keyed by the names in *scheme*.

    Keyword arguments (``name=value``) land under their own name; positional
    arguments land under the scheme name for their position.  Unfilled slots
    stay None.
    """
    args = [item for item in arglist if item.type != token.COMMA]
    mapping = dict((key, None) for key in scheme)
    for pos, arg in enumerate(args):
        if arg.type == syms.argument and arg.children[1].type == token.EQUAL:
            # argument < NAME '=' any > -- an explicit keyword argument
            mapping[arg.children[0].value] = arg.children[2]
        else:
            mapping[scheme[pos]] = arg
    return mapping
import os
from distutils import log
from distutils.command.build_py import build_py
from lib2to3 import refactor
from lib2to3 import pygram
class DistutilsRefactoringTool(refactor.RefactoringTool):
    """Refactoring tool for lib3to2 building.

    Parses input with the grammar variant that has no ``print`` statement
    (the input is Python 3 source) and routes all logging through the
    distutils logger.  NOTE: this module uses Python-2-only ``except``
    syntax, so it must run under Python 2.
    """
    def __init__(self, fixers, options=None, explicit=None):
        super(DistutilsRefactoringTool, self).__init__(fixers, options, explicit)
        # Swap in the print-function grammar so py3k sources parse.
        self.driver.grammar = pygram.python_grammar_no_print_statement
    def refactor_string(self, data, name):
        """Override to keep print statements out of the grammar"""
        try:
            tree = self.driver.parse_string(data)
        except Exception, err:
            self.log_error("Can't parse %s: %s: %s",
                           name, err.__class__.__name__, err)
            return
        self.log_debug("Refactoring %s", name)
        self.refactor_tree(tree, name)
        return tree
    def log_error(self, msg, *args, **kw):
        # Extra keyword args (e.g. exc_info) are deliberately dropped;
        # distutils' log.error does not accept them.
        log.error(msg, *args)
    def log_message(self, msg, *args):
        log.info(msg, *args)
    def log_debug(self, msg, *args):
        log.debug(msg, *args)
def run_3to2(files, fixer_names=None, options=None, explicit=None):
    """Invoke 3to2 on a list of Python files.

    The files should all come from the build area, as the modification is
    done in-place. To reduce the build time, only files modified since the
    last invocation of this function should be passed in the files
    argument."""
    if not files:
        return
    # Default to every fixer shipped with lib3to2.
    names = fixer_names
    if names is None:
        names = refactor.get_fixers_from_package('lib3to2.fixes')
    tool = DistutilsRefactoringTool(names, options=options)
    tool.refactor(files, write=True)
def copydir_run_3to2(src, dest, template=None, fixer_names=None,
                     options=None, explicit=None):
    """Recursively copy a directory, only copying new and changed files,
    running run_3to2 over all newly copied Python modules afterward.
    If you give a template string, it's parsed like a MANIFEST.in.

    Returns the list of files actually copied.
    """
    from distutils.dir_util import mkpath
    from distutils.file_util import copy_file
    from distutils.filelist import FileList
    filelist = FileList()
    # FileList.findall() scans relative to the cwd, so temporarily chdir
    # into src; the finally guarantees we always chdir back.
    curdir = os.getcwd()
    os.chdir(src)
    try:
        filelist.findall()
    finally:
        os.chdir(curdir)
    filelist.files[:] = filelist.allfiles
    if template:
        # Apply MANIFEST.in-style include/exclude lines one at a time.
        for line in template.splitlines():
            line = line.strip()
            if not line: continue
            filelist.process_template_line(line)
    copied = []
    for filename in filelist.files:
        outname = os.path.join(dest, filename)
        mkpath(os.path.dirname(outname))
        # update=1: only copy when src is newer than the destination.
        res = copy_file(os.path.join(src, filename), outname, update=1)
        if res[1]: copied.append(outname)
    # Only the freshly-copied Python modules need refactoring.
    run_3to2([fn for fn in copied if fn.lower().endswith('.py')],
             fixer_names=fixer_names, options=options, explicit=explicit)
    return copied
class Mixin3to2:
    '''Mixin class for commands that run 3to2.
    To configure 3to2, setup scripts may either change
    the class variables, or inherit from individual commands
    to override how 3to2 is invoked.'''
    # provide list of fixers to run;
    # defaults to all from lib3to2.fixers
    fixer_names = None
    # options dictionary
    options = None
    # list of fixers to invoke even though they are marked as explicit
    explicit = None
    def run_3to2(self, files):
        # Delegate to the module-level run_3to2() with this command's config.
        return run_3to2(files, self.fixer_names, self.options, self.explicit)
class build_py_3to2(build_py, Mixin3to2):
    """build_py variant that runs 3to2 on every module copied into the
    build area, converting py3k sources to py2k in place."""
    def run(self):
        # build_module() (below) records each copied file here.
        self.updated_files = []
        # Base class code
        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()
        # 3to2: refactor only the files that were actually (re)copied.
        self.run_3to2(self.updated_files)
        # Remaining base class code
        self.byte_compile(self.get_outputs(include_bytecode=0))
    def build_module(self, module, module_file, package):
        # res is (outfile, copied_flag) from the base implementation.
        res = build_py.build_module(self, module, module_file, package)
        if res[1]:
            # file was copied
            self.updated_files.append(res[0])
        return res
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma
from ..fixer_util import token, syms, Leaf, Node, Star, indentation, ImportAsName
# Names re-exported (via ``from Tkinter import *``) by the Tk dialog modules;
# shared by several PY2MODULES entries below.
TK_BASE_NAMES = ('ACTIVE', 'ALL', 'ANCHOR', 'ARC','BASELINE', 'BEVEL', 'BOTH',
                 'BOTTOM', 'BROWSE', 'BUTT', 'CASCADE', 'CENTER', 'CHAR',
                 'CHECKBUTTON', 'CHORD', 'COMMAND', 'CURRENT', 'DISABLED',
                 'DOTBOX', 'E', 'END', 'EW', 'EXCEPTION', 'EXTENDED', 'FALSE',
                 'FIRST', 'FLAT', 'GROOVE', 'HIDDEN', 'HORIZONTAL', 'INSERT',
                 'INSIDE', 'LAST', 'LEFT', 'MITER', 'MOVETO', 'MULTIPLE', 'N',
                 'NE', 'NO', 'NONE', 'NORMAL', 'NS', 'NSEW', 'NUMERIC', 'NW',
                 'OFF', 'ON', 'OUTSIDE', 'PAGES', 'PIESLICE', 'PROJECTING',
                 'RADIOBUTTON', 'RAISED', 'READABLE', 'RIDGE', 'RIGHT',
                 'ROUND', 'S', 'SCROLL', 'SE', 'SEL', 'SEL_FIRST', 'SEL_LAST',
                 'SEPARATOR', 'SINGLE', 'SOLID', 'SUNKEN', 'SW', 'StringTypes',
                 'TOP', 'TRUE', 'TclVersion', 'TkVersion', 'UNDERLINE',
                 'UNITS', 'VERTICAL', 'W', 'WORD', 'WRITABLE', 'X', 'Y', 'YES',
                 'wantobjects')
# Maps each candidate py2k module to the public names it provides; used by
# new_package() to decide which py2k module supplies an imported name.
PY2MODULES = {
    'urllib2' : (
        'AbstractBasicAuthHandler', 'AbstractDigestAuthHandler',
        'AbstractHTTPHandler', 'BaseHandler', 'CacheFTPHandler',
        'FTPHandler', 'FileHandler', 'HTTPBasicAuthHandler',
        'HTTPCookieProcessor', 'HTTPDefaultErrorHandler',
        'HTTPDigestAuthHandler', 'HTTPError', 'HTTPErrorProcessor',
        'HTTPHandler', 'HTTPPasswordMgr',
        'HTTPPasswordMgrWithDefaultRealm', 'HTTPRedirectHandler',
        'HTTPSHandler', 'OpenerDirector', 'ProxyBasicAuthHandler',
        'ProxyDigestAuthHandler', 'ProxyHandler', 'Request',
        'StringIO', 'URLError', 'UnknownHandler', 'addinfourl',
        'build_opener', 'install_opener', 'parse_http_list',
        'parse_keqv_list', 'randombytes', 'request_host', 'urlopen'),
    'urllib' : (
        'ContentTooShortError', 'FancyURLopener','URLopener',
        'basejoin', 'ftperrors', 'getproxies',
        'getproxies_environment', 'localhost', 'pathname2url',
        'quote', 'quote_plus', 'splitattr', 'splithost',
        'splitnport', 'splitpasswd', 'splitport', 'splitquery',
        'splittag', 'splittype', 'splituser', 'splitvalue',
        'thishost', 'unquote', 'unquote_plus', 'unwrap',
        'url2pathname', 'urlcleanup', 'urlencode', 'urlopen',
        'urlretrieve',),
    'urlparse' : (
        'parse_qs', 'parse_qsl', 'urldefrag', 'urljoin',
        'urlparse', 'urlsplit', 'urlunparse', 'urlunsplit'),
    'dbm' : (
        'ndbm', 'gnu', 'dumb'),
    'anydbm' : (
        'error', 'open'),
    'whichdb' : (
        'whichdb',),
    'BaseHTTPServer' : (
        'BaseHTTPRequestHandler', 'HTTPServer'),
    'CGIHTTPServer' : (
        'CGIHTTPRequestHandler',),
    'SimpleHTTPServer' : (
        'SimpleHTTPRequestHandler',),
    'FileDialog' : TK_BASE_NAMES + (
        'FileDialog', 'LoadFileDialog', 'SaveFileDialog',
        'dialogstates', 'test'),
    'tkFileDialog' : (
        'Directory', 'Open', 'SaveAs', '_Dialog', 'askdirectory',
        'askopenfile', 'askopenfilename', 'askopenfilenames',
        'askopenfiles', 'asksaveasfile', 'asksaveasfilename'),
    'SimpleDialog' : TK_BASE_NAMES + (
        'SimpleDialog',),
    'tkSimpleDialog' : TK_BASE_NAMES + (
        'askfloat', 'askinteger', 'askstring', 'Dialog'),
    'SimpleXMLRPCServer' : (
        'CGIXMLRPCRequestHandler', 'SimpleXMLRPCDispatcher',
        'SimpleXMLRPCRequestHandler', 'SimpleXMLRPCServer',
        'list_public_methods', 'remove_duplicates',
        'resolve_dotted_attribute'),
    'DocXMLRPCServer' : (
        'DocCGIXMLRPCRequestHandler', 'DocXMLRPCRequestHandler',
        'DocXMLRPCServer', 'ServerHTMLDoc','XMLRPCDocGenerator'),
    }
# Maps each py3k module to the tuple of py2k modules its contents may have
# come from.  Keys are always exactly one dot deep ("package.module").
MAPPING = { 'urllib.request' :
                ('urllib2', 'urllib'),
            'urllib.error' :
                ('urllib2', 'urllib'),
            'urllib.parse' :
                ('urllib2', 'urllib', 'urlparse'),
            'dbm.__init__' :
                ('anydbm', 'whichdb'),
            'http.server' :
                ('CGIHTTPServer', 'SimpleHTTPServer', 'BaseHTTPServer'),
            'tkinter.filedialog' :
                ('tkFileDialog', 'FileDialog'),
            'tkinter.simpledialog' :
                ('tkSimpleDialog', 'SimpleDialog'),
            'xmlrpc.server' :
                ('DocXMLRPCServer', 'SimpleXMLRPCServer'),
            }
# helps match 'http', as in 'from http.server import ...'
simple_name = "name='%s'"
# helps match 'server', as in 'from http.server import ...'
simple_attr = "attr='%s'"
# helps match 'HTTPServer', as in 'from http.server import HTTPServer'
simple_using = "using='%s'"
# helps match 'urllib.request', as in 'import urllib.request'
dotted_name = "dotted_name=dotted_name< %s '.' %s >"
# helps match 'http.server', as in 'http.server.HTTPServer(...)'
power_twoname = "pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >"
# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)'
power_onename = "pow=power< %s trailer< '.' using=any > any* >"
# helps match 'from http.server import HTTPServer'
# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler'
# also helps match 'from http.server import *'
from_import = "from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >"
# helps match 'import urllib.request'
name_import = "name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >"
#############
# WON'T FIX #
#############
# helps match 'import urllib.request as name'
name_import_rename = "name_import_rename=dotted_as_name< %s 'as' renamed=any >"
# helps match 'from http import server'
from_import_rename = "from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >"
def all_modules_subpattern():
    """
    Build one alternation subpattern covering every dotted module name in
    MAPPING (urllib.request, http.server, ...), plus the bare top-level
    names of packages whose mapped attr is __init__.
    """
    split_mods = [mod.split(".") for mod in MAPPING]
    dotted_alts = [dotted_name % (simple_name % (mod[0]),
                                  simple_attr % (mod[1]))
                   for mod in split_mods]
    bare_alts = [simple_name % (mod[0])
                 for mod in split_mods if mod[1] == "__init__"]
    return "( " + " | ".join(dotted_alts) + " | " + " | ".join(bare_alts) + " )"
def all_candidates(name, attr, MAPPING=MAPPING):
    """
    Returns all candidate packages for the name.attr
    """
    dotted = "%s.%s" % (name, attr)
    assert dotted in MAPPING, "No matching package found."
    candidates = MAPPING[dotted]
    if attr == '__init__':
        # The bare top-level name itself is also importable in py2k.
        candidates = candidates + (name,)
    return candidates
def new_package(name, attr, using, MAPPING=MAPPING, PY2MODULES=PY2MODULES):
    """
    Return the first candidate py2k package for name.attr that provides
    *using*, or None when no candidate does.
    """
    return next(
        (candidate for candidate in all_candidates(name, attr, MAPPING)
         if using in PY2MODULES[candidate]),
        None)
def build_import_pattern(mapping1, mapping2):
    """
    mapping1: A dict mapping py3k modules to all possible py2k replacements
    mapping2: A dict mapping py2k modules to the things they do
    This builds a HUGE pattern to match all ways that things can be imported
    """
    # One combined pattern catches "from <any known module> import ...".
    yield from_import % (all_modules_subpattern())
    # Per-module patterns for the remaining import shapes.
    for py3k in mapping1:
        name, attr = py3k.split('.')
        s_name = simple_name % (name)
        s_attr = simple_attr % (attr)
        d_name = dotted_name % (s_name, s_attr)
        yield name_import % (d_name)
        yield power_twoname % (s_name, s_attr)
        if attr == '__init__':
            # The bare package name is importable/usable on its own.
            yield name_import % (s_name)
            yield power_onename % (s_name)
        yield name_import_rename % (d_name)
        yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr)
def name_import_replacement(name, attr):
    """Build ``import <cand1>, <cand2>, ...`` for every candidate of name.attr."""
    children = [Name("import")]
    for pos, candidate in enumerate(all_candidates(name.value, attr.value)):
        if pos:
            children.append(Comma())
        children.append(Name(candidate, prefix=" "))
    return Node(syms.import_name, children)
class FixImports2(fixer_base.BaseFix):
    """Fixer that rewrites imports of renamed/split py3k modules (MAPPING)
    into their py2k equivalents, possibly expanding a single import
    statement into several."""
    run_order = 4
    PATTERN = " | \n".join(build_import_pattern(MAPPING, PY2MODULES))
    def transform(self, node, results):
        # The patterns dictate which of these names will be defined
        name = results.get("name")
        attr = results.get("attr")
        if attr is None:
            attr = Name("__init__")
        using = results.get("using")
        in_list = results.get("in_list")
        imp_list = results.get("imp_list")
        power = results.get("pow")
        # NOTE(review): 'before' and 'after' are bound but never used below;
        # presumably leftovers from an older pattern -- verify.
        before = results.get("before")
        after = results.get("after")
        d_name = results.get("dotted_name")
        # An import_stmt is always contained within a simple_stmt
        simple_stmt = node.parent
        # The parent is useful for adding new import_stmts
        parent = simple_stmt.parent
        idx = parent.children.index(simple_stmt)
        if any((results.get("from_import_rename") is not None,
                results.get("name_import_rename") is not None)):
            self.cannot_convert(node, reason="ambiguity: import binds a single name")
        elif using is None and not in_list:
            # import urllib.request, single-name import
            replacement = name_import_replacement(name, attr)
            replacement.prefix = node.prefix
            node.replace(replacement)
        elif using is None:
            # import ..., urllib.request, math, http.server, ...
            # Pull each known dotted name out of the list and emit a new
            # import statement for its py2k candidates on the next line.
            for d_name in imp_list:
                if d_name.type == syms.dotted_name:
                    name = d_name.children[0]
                    attr = d_name.children[2]
                elif d_name.type == token.NAME and d_name.value + ".__init__" in MAPPING:
                    name = d_name
                    attr = Name("__init__")
                else:
                    continue
                if name.value + "." + attr.value not in MAPPING:
                    continue
                candidates = all_candidates(name.value, attr.value)
                children = [Name("import")]
                for c in candidates:
                    children.append(Name(c, prefix=" "))
                    children.append(Comma())
                children.pop()
                # Put in the new statement.
                indent = indentation(simple_stmt)
                next_stmt = Node(syms.simple_stmt, [Node(syms.import_name, children), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
                # Remove the old imported name
                test_comma = d_name.next_sibling
                if test_comma and test_comma.type == token.COMMA:
                    test_comma.remove()
                elif test_comma is None:
                    test_comma = d_name.prev_sibling
                    if test_comma and test_comma.type == token.COMMA:
                        test_comma.remove()
                d_name.remove()
            # If every name was replaced, drop the now-empty original import.
            if not in_list.children:
                simple_stmt.remove()
        elif in_list is not None:
            ##########################################################
            # "from urllib.request import urlopen, urlretrieve, ..." #
            # Replace one import statement with potentially many.    #
            ##########################################################
            packages = dict([(n,[]) for n in all_candidates(name.value,
                                                            attr.value)])
            # Figure out what names need to be imported from what
            # Add them to a dict to be parsed once we're completely done
            for imported in using:
                if imported.type == token.COMMA:
                    continue
                if imported.type == syms.import_as_name:
                    test_name = imported.children[0].value
                    if len(imported.children) > 2:
                        # 'as' whatever
                        rename = imported.children[2].value
                    else:
                        rename = None
                elif imported.type == token.NAME:
                    test_name = imported.value
                    rename = None
                pkg = new_package(name.value, attr.value, test_name)
                packages[pkg].append((test_name, rename))
            # Parse the dict to create new import statements to replace this one
            imports = []
            for new_pkg, names in packages.items():
                if not names:
                    # Didn't import anything from that package, move along
                    continue
                new_names = []
                for test_name, rename in names:
                    if rename is None:
                        new_names.append(Name(test_name, prefix=" "))
                    else:
                        new_names.append(ImportAsName(test_name, rename, prefix=" "))
                    new_names.append(Comma())
                new_names.pop()
                imports.append(FromImport(new_pkg, new_names))
            # Replace this import statement with one of the others
            replacement = imports.pop()
            replacement.prefix = node.prefix
            node.replace(replacement)
            indent = indentation(simple_stmt)
            # Add the remainder of the imports as new statements.
            while imports:
                next_stmt = Node(syms.simple_stmt, [imports.pop(), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
        elif using.type == token.STAR:
            # from urllib.request import *
            # Star-import from every candidate, one statement per package.
            nodes = [FromImport(pkg, [Star(prefix=" ")]) for pkg in
                     all_candidates(name.value, attr.value)]
            replacement = nodes.pop()
            replacement.prefix = node.prefix
            node.replace(replacement)
            indent = indentation(simple_stmt)
            while nodes:
                next_stmt = Node(syms.simple_stmt, [nodes.pop(), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
        elif power is not None:
            # urllib.request.urlopen
            # Replace it with urllib2.urlopen
            pkg = new_package(name.value, attr.value, using.value)
            # Remove the trailer node that contains attr.
            if attr.parent:
                attr.parent.remove()
            name.replace(Name(pkg, prefix=name.prefix))
        elif using.type == token.NAME:
            # from urllib.request import urlopen
            pkg = new_package(name.value, attr.value, using.value)
            if attr.value == "__init__" and pkg == name.value:
                # Replacing "from abc import xyz" with "from abc import xyz"
                # Just leave it alone so as not to mess with other fixers
                return
            else:
                node.replace(FromImport(pkg, [using]))
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import
from lib2to3.pygram import python_symbols as syms
from lib2to3.pgen2 import token
from lib2to3.pytree import Node, Leaf
from ..fixer_util import NameImport
# used in simple_mapping_to_pattern()
# Simple one-to-one renames: py3k module name -> py2k module name.  Values
# containing a dot (e.g. "test.test_support") replace a dotted py3k name.
MAPPING = {"reprlib": "repr",
           "winreg": "_winreg",
           "configparser": "ConfigParser",
           "copyreg": "copy_reg",
           "queue": "Queue",
           "socketserver": "SocketServer",
           "_markupbase": "markupbase",
           "test.support": "test.test_support",
           "dbm.bsd": "dbhash",
           "dbm.ndbm": "dbm",
           "dbm.dumb": "dumbdbm",
           "dbm.gnu": "gdbm",
           "html.parser": "HTMLParser",
           "html.entities": "htmlentitydefs",
           "http.client": "httplib",
           "http.cookies": "Cookie",
           "http.cookiejar": "cookielib",
           "tkinter": "Tkinter",
           "tkinter.dialog": "Dialog",
           "tkinter._fix": "FixTk",
           "tkinter.scrolledtext": "ScrolledText",
           "tkinter.tix": "Tix",
           "tkinter.constants": "Tkconstants",
           "tkinter.dnd": "Tkdnd",
           "tkinter.__init__": "Tkinter",
           "tkinter.colorchooser": "tkColorChooser",
           "tkinter.commondialog": "tkCommonDialog",
           "tkinter.font": "tkFont",
           "tkinter.messagebox": "tkMessageBox",
           "tkinter.turtle": "turtle",
           "urllib.robotparser": "robotparser",
           "xmlrpc.client": "xmlrpclib",
           "builtins": "__builtin__",
           }
# generic strings to help build patterns
# these variables mean (with http.client.HTTPConnection as an example):
# name = http
# attr = client
# used = HTTPConnection
# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match)
# helps match 'queue', as in 'from queue import ...'
simple_name_match = "name='{name}'"
# helps match 'client', to be used if client has been imported from http
subname_match = "attr='{attr}'"
# helps match 'http.client', as in 'import urllib.request'
dotted_name_match = "dotted_name=dotted_name< {fmt_name} '.' {fmt_attr} >"
# helps match 'queue', as in 'queue.Queue(...)'
power_onename_match = "{fmt_name}"
# helps match 'http.client', as in 'http.client.HTTPConnection(...)'
power_twoname_match = "power< {fmt_name} trailer< '.' {fmt_attr} > any* >"
# helps match 'client.HTTPConnection', if 'client' has been imported from http
power_subname_match = "power< {fmt_attr} any* >"
# helps match 'from http.client import HTTPConnection'
from_import_match = "from_import=import_from< 'from' {fmt_name} 'import' ['('] imported=any [')'] >"
# helps match 'from http import client'
from_import_submod_match = "from_import_submod=import_from< 'from' {fmt_name} 'import' ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any > | import_as_names< any* ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any >) any* > ) >"
# helps match 'import urllib.request'
name_import_match = "name_import=import_name< 'import' {fmt_name} > | name_import=import_name< 'import' dotted_as_name< {fmt_name} 'as' renamed=any > >"
# helps match 'import http.client, winreg'
multiple_name_import_match = "name_import=import_name< 'import' dotted_as_names< names=any* > >"
def all_patterns(name):
    """
    Accepts a string and returns a pattern of possible patterns involving that name
    Called by simple_mapping_to_pattern for each name in the mapping it receives.
    """
    # i_ denotes an import-like node
    # u_ denotes a node that appears to be a usage of the name
    if '.' not in name:
        # Plain top-level module: import queue / from queue import x / queue.x
        fmt_name = simple_name_match.format(name=name)
        return ' | \n'.join((
            name_import_match.format(fmt_name=fmt_name),
            from_import_match.format(fmt_name=fmt_name),
            power_onename_match.format(fmt_name=fmt_name),
        ))
    # Dotted module such as http.client: cover dotted imports, submodule
    # imports, and both dotted and bare usages.
    base, attr = name.split('.', 1)
    fmt_name = simple_name_match.format(name=base)
    fmt_attr = subname_match.format(attr=attr)
    fmt_dotted = dotted_name_match.format(fmt_name=fmt_name, fmt_attr=fmt_attr)
    return ' | \n'.join((
        name_import_match.format(fmt_name=fmt_dotted),
        from_import_match.format(fmt_name=fmt_dotted),
        from_import_submod_match.format(fmt_name=fmt_name, fmt_attr=fmt_attr),
        power_twoname_match.format(fmt_name=fmt_name, fmt_attr=fmt_attr),
        power_subname_match.format(fmt_attr=fmt_attr),
    ))
class FixImports(fixer_base.BaseFix):
    """Fixer for simple one-to-one module renames (MAPPING): rewrites both
    the import statements and dotted usages of the renamed modules."""
    order = "pre"
    # One alternation per mapped name, plus the multi-name import pattern.
    PATTERN = ' | \n'.join([all_patterns(name) for name in MAPPING])
    PATTERN = ' | \n'.join((PATTERN, multiple_name_import_match))
    def fix_dotted_name(self, node, mapping=MAPPING):
        """
        Accepts either a DottedName node or a power node with a trailer.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        # NOTE(review): if node is neither dotted_name nor power, _name/_attr
        # are unbound and the code below raises NameError -- the callers in
        # transform() appear to guarantee one of the two types; verify.
        if node.type == syms.dotted_name:
            _name = node.children[0]
            _attr = node.children[2]
        elif node.type == syms.power:
            _name = node.children[0]
            _attr = node.children[1].children[1]
        name = _name.value
        attr = _attr.value
        full_name = name + '.' + attr
        if not full_name in mapping:
            return
        to_repl = mapping[full_name]
        if '.' in to_repl:
            # dotted replacement: swap both halves in place
            repl_name, repl_attr = to_repl.split('.')
            _name.replace(Name(repl_name, prefix=_name.prefix))
            _attr.replace(Name(repl_attr, prefix=_attr.prefix))
        elif node.type == syms.dotted_name:
            node.replace(Name(to_repl, prefix=node.prefix))
        elif node.type == syms.power:
            # collapse "name.attr" usage to the single replacement name
            _name.replace(Name(to_repl, prefix=_name.prefix))
            parent = _attr.parent
            _attr.remove()
            parent.remove()
    def fix_simple_name(self, node, mapping=MAPPING):
        """
        Accepts a Name leaf.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        assert node.type == token.NAME, repr(node)
        if not node.value in mapping:
            return
        replacement = mapping[node.value]
        node.replace(Leaf(token.NAME, str(replacement), prefix=node.prefix))
    def fix_submod_import(self, imported, name, node):
        """
        Accepts a list of NAME leafs, a name string, and a node
        node is given as an argument to BaseFix.transform()
        NAME leafs come from an import_as_names node (the children)
        name string is the base name found in node.
        """
        submods = []
        missed = []
        for attr in imported:
            dotted = '.'.join((name, attr.value))
            if dotted in MAPPING:
                # get the replacement module
                to_repl = MAPPING[dotted]
                if '.' not in to_repl:
                    # it's a simple name, so use a simple replacement.
                    _import = NameImport(Name(to_repl, prefix=" "), attr.value)
                    submods.append(_import)
            elif attr.type == token.NAME:
                missed.append(attr.clone())
        if not submods:
            return
        parent = node.parent
        node.replace(submods[0])
        if len(submods) > 1:
            # Append the remaining replacement imports after the first.
            # NOTE(review): 'start'/'prev' are never used after the pop --
            # looks like dead code (the pop itself matters: it drops the
            # already-placed first import); verify before cleaning up.
            start = submods.pop(0)
            prev = start
            for submod in submods:
                parent.append_child(submod)
        if missed:
            # Re-emit the unrecognized names as a stripped-down original import.
            self.warning(node, "Imported names not known to 3to2 to be part of the package {0}. Leaving those alone... high probability that this code will be incorrect.".format(name))
            children = [Name("from"), Name(name, prefix=" "), Name("import", prefix=" "), Node(syms.import_as_names, missed)]
            orig_stripped = Node(syms.import_from, children)
            parent.append_child(Newline())
            parent.append_child(orig_stripped)
    def get_dotted_import_replacement(self, name_node, attr_node, mapping=MAPPING, renamed=None):
        """
        For (http, client) given and httplib being the correct replacement,
        returns (httplib as client, None)
        For (test, support) given and test.test_support being the replacement,
        returns (test, test_support as support)
        """
        full_name = name_node.value + '.' + attr_node.value
        replacement = mapping[full_name]
        if '.' in replacement:
            new_name, new_attr = replacement.split('.')
            if renamed is None:
                return Name(new_name, prefix=name_node.prefix), Node(syms.dotted_as_name, [Name(new_attr, prefix=attr_node.prefix), Name('as', prefix=" "), attr_node.clone()])
            else:
                return Name(new_name, prefix=name_node.prefix), Name(new_attr, prefix=attr_node.prefix)
        else:
            return Node(syms.dotted_as_name, [Name(replacement, prefix=name_node.prefix), Name('as', prefix=' '), Name(attr_node.value, prefix=attr_node.prefix)]), None
    def transform(self, node, results):
        # Which of these are set depends on which alternative matched.
        from_import = results.get("from_import")
        from_import_submod = results.get("from_import_submod")
        name_import = results.get("name_import")
        dotted_name = results.get("dotted_name")
        name = results.get("name")
        names = results.get("names")
        attr = results.get("attr")
        imported = results.get("imported")
        if names:
            # import a, b.c, d as e -- fix each listed name individually
            for name in names:
                if name.type == token.NAME:
                    self.fix_simple_name(name)
                elif name.type == syms.dotted_as_name:
                    self.fix_simple_name(name.children[0]) if name.children[0].type == token.NAME else \
                    self.fix_dotted_name(name.children[0])
                elif name.type == syms.dotted_name:
                    self.fix_dotted_name(name)
        elif from_import_submod:
            # from http import client [as alias]
            renamed = results.get("renamed")
            new_name, new_attr = self.get_dotted_import_replacement(name, attr, renamed=renamed)
            if new_attr is not None:
                name.replace(new_name)
                attr.replace(new_attr)
            else:
                # replacement is a single module: emit "import <mod> as <attr>"
                children = [Name("import"), new_name]
                node.replace(Node(syms.import_name, children, prefix=node.prefix))
        elif dotted_name:
            self.fix_dotted_name(dotted_name)
        elif name_import or from_import:
            self.fix_simple_name(name)
        elif name and not attr:
            # Bare usage of a mapped name: only rename when the replacement
            # is actually imported and the name looks like a module reference.
            if does_tree_import(None, MAPPING[name.value], node) and \
               is_probably_builtin(name):
                self.fix_simple_name(name)
        elif name and attr:
            # Note that this will fix a dotted name that was never imported. This will probably not matter.
            self.fix_dotted_name(node)
        elif imported and imported.type == syms.import_as_names:
            self.fix_submod_import(imported=imported.children, node=node, name=name.value)
from __future__ import with_statement # Aiming for 2.5-compatible code
from lib2to3 import fixer_base
from lib2to3.pytree import Node, Leaf
from lib2to3.pygram import python_symbols as syms, token
from lib2to3.fixer_util import (Name, FromImport, Newline, Call, Comma, Dot,
LParen, RParen, touch_import)
import warnings
import sys
def gen_printargs(lst):
    """
    Accepts a list of all nodes in the print call's trailer.
    Yields nodes that will be easier to deal with: any arglist node is
    flattened into its individual argument nodes, with the separating
    commas skipped.
    """
    for node in lst:
        if node.type == syms.arglist:
            # arglist<pos=any* kwargs=(argument<"file"|"sep"|"end" "=" any>*)>
            kids = node.children
            it = kids.__iter__()
            try:
                while True:
                    arg = next(it)
                    if arg.type == syms.argument:
                        # argument < "file"|"sep"|"end" "=" (any) >
                        yield arg
                        next(it)  # skip the comma after the argument
                    else:
                        yield arg
                        next(it)  # skip the comma after the positional arg
            except StopIteration:
                # Ran off the end of this arglist; move to the next node.
                continue
        else:
            yield node
def isNone(arg):
    """Return True if *arg* is a NAME leaf whose value is the literal ``None``."""
    if arg.type != token.NAME:
        return False
    return arg.value == "None"
def _unicode(arg):
"""
Calls unicode() on the arg in the node.
"""
prefix = arg.prefix
arg = arg.clone()
arg.prefix = ""
ret = Call(Name("unicode", prefix=prefix), [arg])
return ret
def add_file_part(file, lst):
    """
    Append the ``>> file ,`` nodes of a print statement to *lst*.

    Does nothing when no file (or an explicit None) was given, since the
    plain print statement already targets sys.stdout.
    """
    if file is None or isNone(file):
        return
    lst.extend([
        Leaf(token.RIGHTSHIFT, ">>", prefix=" "),
        file.clone(),
        Comma(),
    ])
def add_sep_part(sep, pos, lst):
    """
    Append nodes to *lst* reproducing print()'s ``sep`` behaviour.

    A non-default separator turns the positional args into
    ``sep.join([unicode(a), unicode(b), ...])``; otherwise the args are
    appended comma-separated, which is the print statement's native
    (single-space) separator.
    """
    if sep is not None and not isNone(sep) and \
        not (sep.type == token.STRING and sep.value in ("' '", '" "')):
        temp = []
        for arg in pos:
            # The join needs strings, so coerce each argument first.
            temp.append(_unicode(arg.clone()))
            if sys.version_info >= (2, 6):
                warnings.warn("Calling unicode() on what may be a bytes object")
            temp.append(Comma())
        # Drop the trailing comma.
        # NOTE(review): raises IndexError if pos is empty (e.g. a call with
        # only a sep= keyword) -- TODO confirm this cannot reach here.
        del temp[-1]
        sep = sep.clone()
        sep.prefix = " "
        args = Node(syms.listmaker, temp)
        new_list = Node(syms.atom, [Leaf(token.LSQB, "["), args,
                                Leaf(token.RSQB, "]")])
        join_arg = Node(syms.trailer, [LParen(), new_list, RParen()])
        sep_join = Node(syms.power, [sep, Node(syms.trailer,
                                               [Dot(), Name("join")])])
        lst.append(sep_join)
        lst.append(join_arg)
    else:
        if pos:
            # Space between 'print' (or '>> file,') and the first argument.
            pos[0].prefix = " "
        for arg in pos:
            lst.append(arg.clone())
            lst.append(Comma())
        # Drop the trailing comma.  NOTE(review): when pos is empty this
        # deletes the last node already in lst (e.g. the comma added by
        # add_file_part), which appears intentional for 'print(file=f)'.
        del lst[-1]
def add_end_part(end, file, parent, loc):
    """
    Insert ``; <file>.write(<end>)`` after the print statement at *loc*.

    Emulates print()'s ``end`` keyword: the trailing comma on the print
    statement suppresses the newline, and the explicit write() emits the
    requested terminator instead.
    """
    if isNone(end):
        return
    if end.type == token.STRING and end.value in ("' '", '" "',
                                                  "u' '", 'u" "',
                                                  "b' '", 'b" "'):
        # A single-space end matches the print statement's own
        # trailing-comma behaviour, so no extra write() is needed.
        return
    if file is None:
        # No explicit file: write to sys.stdout, importing sys if necessary.
        touch_import(None, "sys", parent)
        file = Node(syms.power, [Name("sys"),
                                 Node(syms.trailer, [Dot(), Name("stdout")])])
    end_part = Node(syms.power, [file,
                                 Node(syms.trailer, [Dot(), Name("write")]),
                                 Node(syms.trailer, [LParen(), end, RParen()])])
    end_part.prefix = " "
    parent.insert_child(loc, Leaf(token.SEMI, ";"))
    parent.insert_child(loc+1, end_part)
def replace_print(pos, opts, old_node=None):
    """
    Replace old_node with a new print statement built from *pos* and *opts*.
    Also hacks in the "end" functionality via an appended write() call.
    Returns the new print_stmt node.
    """
    new_node = new_print(*pos, **opts)
    end = None if "end" not in opts else opts["end"].clone()
    file = None if "file" not in opts else opts["file"].clone()
    if old_node is None:
        # No node to replace: fabricate a bare statement to hold the result.
        parent = Node(syms.simple_stmt, [Leaf(token.NEWLINE, "\n")])
        i = 0
    else:
        parent = old_node.parent
        # pytree remove() returns the child index the node occupied.
        i = old_node.remove()
    parent.insert_child(i, new_node)
    # end='\n' is the default print behaviour and needs no extra write().
    if end is not None and not (end.type == token.STRING and \
        end.value in ("'\\n'", '"\\n"')):
        add_end_part(end, file, parent, i+1)
    return new_node
def new_print(*pos, **opts):
    """
    Constructs a new print_stmt node.

    *pos* holds all positional arguments passed to print(); *opts* may
    contain zero or more of:
      'sep':  separator string node
      'file': a file-like target supporting write()
      'end':  terminator string node
    """
    children = [Name("print")]
    sep = opts.get("sep")
    file = opts.get("file")
    end = opts.get("end")
    add_file_part(file, children)
    add_sep_part(sep, pos, children)
    if end is not None and not isNone(end):
        # Non-default end: leave a trailing comma so the statement does not
        # emit its own newline (add_end_part writes the terminator).
        if end.value not in ('"\\n"', "'\\n'"):
            children.append(Comma())
    return Node(syms.print_stmt, children)
def map_printargs(args):
    """
    Accepts a list of all nodes in the print call's trailer.
    Returns (pos, mapping): pos is the list of positional argument nodes
    and mapping maps 'sep'/'end'/'file' to their value nodes.
    Returns (None, None) if star-arg unpacking is present, since that
    cannot be converted to a print statement.
    """
    mapping = {}
    pos = []
    # Iterate the generator directly instead of materializing it into a
    # throwaway list first (the original built a copying comprehension).
    for arg in gen_printargs(args):
        if arg.type == syms.argument:
            # Keyword argument: argument< NAME '=' value >
            kids = arg.children
            assert kids[0].type == token.NAME, repr(arg)
            assert len(kids) > 1, repr(arg)
            assert str(kids[0].value) in ("sep", "end", "file")
            assert str(kids[0].value) not in mapping, mapping
            mapping[str(kids[0].value)] = kids[2]
        elif arg.type == token.STAR:
            return (None, None)
        else:
            pos.append(arg)
    return (pos, mapping)
class FixPrint(fixer_base.BaseFix):
    """Converts print() function calls into Python 2 print statements."""

    PATTERN = """
              power< 'print' parens=trailer < '(' args=any* ')' > any* >
              """

    def match(self, node):
        """
        Since the tree needs to be fixed once and only once if and only if it
        matches, then we can start discarding matches after we make the first.
        """
        # NOTE(review): this simply defers to BaseFix.match; the discarding
        # described above does not appear to be implemented here.
        return super(FixPrint,self).match(node)

    def transform(self, node, results):
        # Rewrite the matched print(...) call as a print statement.
        args = results.get("args")
        if not args:
            # Bare print(): just drop the parentheses, leaving 'print'.
            parens = results.get("parens")
            parens.remove()
            return
        pos, opts = map_printargs(args)
        if pos is None or opts is None:
            # map_printargs signals *args unpacking with (None, None).
            self.cannot_convert(node, "-fprint does not support argument unpacking. fix using -xprint and then again with -fprintfunction.")
            return
        if "file" in opts and \
           "end" in opts and \
           opts["file"].type != token.NAME:
            self.warning(opts["file"], "file is not a variable name; "\
                "print fixer suggests to bind the file to a variable "\
                "name first before passing it to print function")
        try:
            # Capture the unicode-coercion warnings emitted by add_sep_part
            # so they can be re-reported through the fixer's warning channel.
            with warnings.catch_warnings(record=True) as w:
                new_node = replace_print(pos, opts, old_node=node)
                if len(w) > 0:
                    self.warning(node, "coercing to unicode even though this may be a bytes object")
        except AttributeError:
            # Python 2.5 doesn't have warnings.catch_warnings, so we're in Python 2.5 code here...
            new_node = replace_print(pos, dict([(bytes(k), opts[k]) for k in opts]), old_node=node)
        new_node.prefix = node.prefix
from lib2to3 import fixer_base
from ..fixer_util import token, indentation, suitify, String, Newline, Comma, DoubleStar, Name
# Code templates injected into the function body: each keyword-only
# parameter is popped out of the replacement kwargs dict.
_assign_template = "%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
# Guard used when the parameter has a default: only pop if the caller passed it.
_if_template = "if '%(name)s' in %(kwargs)s: %(assign)s"
# Fallback assignment of the declared default value.
_else_template = "else: %(name)s = %(default)s"
# Name of the synthetic **kwargs dict added when the original had none.
_kwargs_default_name = "_3to2kwargs"
def gen_params(raw_params):
    """
    Generator that yields tuples of (name, default_value) for each
    keyword-only parameter in the list.
    If no default is given, default_value is None (the Python object,
    not Leaf(token.NAME, 'None')).
    """
    assert raw_params[0].type == token.STAR and len(raw_params) > 2
    curr_idx = 2 # the first place a keyword-only parameter name can be is index 2
    max_idx = len(raw_params)
    while curr_idx < max_idx:
        curr_item = raw_params[curr_idx]
        prev_item = curr_item.prev_sibling
        if curr_item.type != token.NAME:
            # Skip commas, '=' signs and default-value nodes.
            curr_idx += 1
            continue
        if prev_item is not None and prev_item.type == token.DOUBLESTAR:
            # Reached the **kwargs name: the keyword-only section is over.
            break
        name = curr_item.value
        nxt = curr_item.next_sibling
        if nxt is not None and nxt.type == token.EQUAL:
            # 'name = default' -- the default node follows the '=' leaf.
            default_value = nxt.next_sibling
            curr_idx += 2
        else:
            default_value = None
        yield (name, default_value)
        curr_idx += 1
def remove_params(raw_params, kwargs_default=_kwargs_default_name):
    """
    Removes all keyword-only args from the params list and a bare star, if any.
    Does not add the kwargs dict if needed.
    Returns True if more action is needed, False if not
    (more action is needed if no kwargs dict exists)
    """
    assert raw_params[0].type == token.STAR
    if raw_params[1].type == token.COMMA:
        # Bare '*': remove the star and its comma as well.
        raw_params[0].remove()
        raw_params[1].remove()
        kw_params = raw_params[2:]
    else:
        # '*args' form: keep it; keyword-only params start after '*args ,'.
        kw_params = raw_params[3:]
    for param in kw_params:
        if param.type != token.DOUBLESTAR:
            param.remove()
        else:
            # Hit '**kwargs': a kwargs dict already exists.
            return False
    else:
        # for/else: the loop completed without finding '**', so the caller
        # must add a **kwargs dict itself.
        return True
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
    """
    Returns string with the name of the kwargs dict if the params after the first star need fixing
    Otherwise returns empty string
    """
    found_kwargs = False
    needs_fix = False
    for t in raw_params[2:]:
        if t.type == token.COMMA:
            # Commas are irrelevant at this stage.
            continue
        elif t.type == token.NAME and not found_kwargs:
            # Keyword-only argument: definitely need to fix.
            needs_fix = True
        elif t.type == token.NAME and found_kwargs:
            # Return 'foobar' of **foobar, if needed.
            return t.value if needs_fix else ''
        elif t.type == token.DOUBLESTAR:
            # Found the '**' token that introduces **foobar.
            found_kwargs = True
        else:
            # Never found **foobar. Return a synthetic name, if needed.
            return kwargs_default if needs_fix else ''
    # NOTE(review): if the loop runs to completion, the function implicitly
    # returns None, which callers treat the same as '' (falsy).
class FixKwargs(fixer_base.BaseFix):
    """
    Converts keyword-only arguments (PEP 3102) into equivalent **kwargs
    unpacking code injected at the top of the function body.
    """

    run_order = 7 # Run after function annotations are removed

    PATTERN = "funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"

    def transform(self, node, results):
        params_rawlist = results["params"]
        # Locate the '*' (bare or *args) that opens the keyword-only section.
        for i, item in enumerate(params_rawlist):
            if item.type == token.STAR:
                params_rawlist = params_rawlist[i:]
                break
        else:
            # No star: nothing keyword-only in this signature.
            return
        # params is guaranteed to be a list starting with *.
        # if fixing is needed, there will be at least 3 items in this list:
        # [STAR, COMMA, NAME] is the minimum that we need to worry about.
        new_kwargs = needs_fixing(params_rawlist)
        # new_kwargs is the name of the kwargs dictionary.
        if not new_kwargs:
            return
        suitify(node)
        # At this point, params_rawlist is guaranteed to be a list
        # beginning with a star that includes at least one keyword-only param
        # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
        # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
        # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
        # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
        # We need to insert our new stuff before the first_stmt and change the
        # first_stmt's prefix.
        suite = node.children[4]
        first_stmt = suite.children[2]
        ident = indentation(first_stmt)
        # Inserting at index 2 each time means the statements end up in
        # reverse order of insertion, before the original first statement.
        for name, default_value in gen_params(params_rawlist):
            if default_value is None:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_assign_template %{'name':name, 'kwargs':new_kwargs}, prefix=ident))
            else:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_else_template %{'name':name, 'default':default_value}, prefix=ident))
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_if_template %{'assign':_assign_template %{'name':name, 'kwargs':new_kwargs}, 'name':name, 'kwargs':new_kwargs}, prefix=ident))
        first_stmt.prefix = ident
        suite.children[2].prefix = ""
        # Now, we need to fix up the list of params.
        must_add_kwargs = remove_params(params_rawlist)
        if must_add_kwargs:
            arglist = results['arglist']
            if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
                arglist.append_child(Comma())
            arglist.append_child(DoubleStar(prefix=" "))
            arglist.append_child(Name(new_kwargs))
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import
from lib2to3.pygram import python_symbols as syms
from lib2to3.pgen2 import token
from lib2to3.pytree import Node, Leaf
from ..fixer_util import NameImport
# used in simple_mapping_to_pattern()
# Maps each Python 3 (renamed) stdlib module name to the Python 2 name the
# fixer should emit.  Dotted keys denote submodules (e.g. "http.client").
MAPPING = {"reprlib": "repr",
           "winreg": "_winreg",
           "configparser": "ConfigParser",
           "copyreg": "copy_reg",
           "queue": "Queue",
           "socketserver": "SocketServer",
           "_markupbase": "markupbase",
           "test.support": "test.test_support",
           "dbm.bsd": "dbhash",
           "dbm.ndbm": "dbm",
           "dbm.dumb": "dumbdbm",
           "dbm.gnu": "gdbm",
           "html.parser": "HTMLParser",
           "html.entities": "htmlentitydefs",
           "http.client": "httplib",
           "http.cookies": "Cookie",
           "http.cookiejar": "cookielib",
           "tkinter": "Tkinter",
           "tkinter.dialog": "Dialog",
           "tkinter._fix": "FixTk",
           "tkinter.scrolledtext": "ScrolledText",
           "tkinter.tix": "Tix",
           "tkinter.constants": "Tkconstants",
           "tkinter.dnd": "Tkdnd",
           "tkinter.__init__": "Tkinter",
           "tkinter.colorchooser": "tkColorChooser",
           "tkinter.commondialog": "tkCommonDialog",
           "tkinter.font": "tkFont",
           "tkinter.messagebox": "tkMessageBox",
           "tkinter.turtle": "turtle",
           "urllib.robotparser": "robotparser",
           "xmlrpc.client": "xmlrpclib",
           "builtins": "__builtin__",
}

# generic strings to help build patterns
# these variables mean (with http.client.HTTPConnection as an example):
# name = http
# attr = client
# used = HTTPConnection
# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match)

# helps match 'queue', as in 'from queue import ...'
simple_name_match = "name='%s'"
# helps match 'client', to be used if client has been imported from http
subname_match = "attr='%s'"
# helps match 'http.client', as in 'import urllib.request'
dotted_name_match = "dotted_name=dotted_name< %s '.' %s >"
# helps match 'queue', as in 'queue.Queue(...)'
power_onename_match = "%s"
# helps match 'http.client', as in 'http.client.HTTPConnection(...)'
power_twoname_match = "power< %s trailer< '.' %s > any* >"
# helps match 'client.HTTPConnection', if 'client' has been imported from http
power_subname_match = "power< %s any* >"
# helps match 'from http.client import HTTPConnection'
from_import_match = "from_import=import_from< 'from' %s 'import' imported=any >"
# helps match 'from http import client'
from_import_submod_match = "from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >"
# helps match 'import urllib.request'
name_import_match = "name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >"
# helps match 'import http.client, winreg'
multiple_name_import_match = "name_import=import_name< 'import' dotted_as_names< names=any* > >"
def all_patterns(name):
    """
    Accepts a string and returns a pattern of possible patterns involving that name
    Called by simple_mapping_to_pattern for each name in the mapping it receives.
    """
    # Patterns prefixed i_ in the original were import-like matches and
    # u_ were usage matches; the tuple below keeps the same ordering.
    if '.' not in name:
        simple_name = simple_name_match % (name)
        alternatives = (
            name_import_match % (simple_name, simple_name),
            from_import_match % (simple_name),
            power_onename_match % (simple_name),
        )
    else:
        base, attr = name.split('.', 1)
        simple_name = simple_name_match % (base)
        simple_attr = subname_match % (attr)
        dotted_name = dotted_name_match % (simple_name, simple_attr)
        alternatives = (
            name_import_match % (dotted_name, dotted_name),
            from_import_match % (dotted_name),
            from_import_submod_match % (simple_name, simple_attr, simple_attr,
                                        simple_attr, simple_attr),
            power_twoname_match % (simple_name, simple_attr),
            power_subname_match % (simple_attr),
        )
    return ' | \n'.join(alternatives)
class FixImports(fixer_base.BaseFix):
    """
    Rewrites imports and usages of stdlib modules renamed in Python 3 back
    to their Python 2 names, driven by the MAPPING table above.
    """

    order = "pre"

    # One big alternation of every import/usage pattern for every mapped
    # name, plus the 'import a, b, c' multiple-import form.
    PATTERN = ' | \n'.join([all_patterns(name) for name in MAPPING])
    PATTERN = ' | \n'.join((PATTERN, multiple_name_import_match))

    def fix_dotted_name(self, node, mapping=MAPPING):
        """
        Accepts either a DottedName node or a power node with a trailer.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        if node.type == syms.dotted_name:
            _name = node.children[0]
            _attr = node.children[2]
        elif node.type == syms.power:
            _name = node.children[0]
            _attr = node.children[1].children[1]
        name = _name.value
        attr = _attr.value
        full_name = name + '.' + attr
        if full_name not in mapping:
            return
        to_repl = mapping[full_name]
        if '.' in to_repl:
            # Dotted replacement: swap both halves in place.
            repl_name, repl_attr = to_repl.split('.')
            _name.replace(Name(repl_name, prefix=_name.prefix))
            _attr.replace(Name(repl_attr, prefix=_attr.prefix))
        elif node.type == syms.dotted_name:
            node.replace(Name(to_repl, prefix=node.prefix))
        elif node.type == syms.power:
            # Single-name replacement of a usage: drop the '.attr' trailer.
            _name.replace(Name(to_repl, prefix=_name.prefix))
            parent = _attr.parent
            _attr.remove()
            parent.remove()

    def fix_simple_name(self, node, mapping=MAPPING):
        """
        Accepts a Name leaf.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        assert node.type == token.NAME, repr(node)
        if node.value not in mapping:
            return
        replacement = mapping[node.value]
        node.replace(Leaf(token.NAME, str(replacement), prefix=node.prefix))

    def fix_submod_import(self, imported, name, node):
        """
        Accepts a list of NAME leafs, a name string, and a node
        node is given as an argument to BaseFix.transform()
        NAME leafs come from an import_as_names node (the children)
        name string is the base name found in node.
        """
        submods = []
        missed = []
        for attr in imported:
            dotted = '.'.join((name, attr.value))
            if dotted in MAPPING:
                # get the replacement module
                to_repl = MAPPING[dotted]
                if '.' not in to_repl:
                    # it's a simple name, so use a simple replacement.
                    _import = NameImport(Name(to_repl, prefix=" "), attr.value)
                    submods.append(_import)
            elif attr.type == token.NAME:
                missed.append(attr.clone())
        if not submods:
            return
        parent = node.parent
        node.replace(submods[0])
        # Append the remaining replacement imports after the first.
        # (Removed the original's unused 'start'/'prev' locals.)
        for submod in submods[1:]:
            parent.append_child(submod)
        if missed:
            self.warning(node, "Imported names not known to 3to2 to be part of the package %s. Leaving those alone... high probability that this code will be incorrect." % (name))
            children = [Name("from"), Name(name, prefix=" "), Name("import", prefix=" "), Node(syms.import_as_names, missed)]
            orig_stripped = Node(syms.import_from, children)
            parent.append_child(Newline())
            parent.append_child(orig_stripped)

    def get_dotted_import_replacement(self, name_node, attr_node, mapping=MAPPING, renamed=None):
        """
        For (http, client) given and httplib being the correct replacement,
        returns (httplib as client, None)
        For (test, support) given and test.test_support being the replacement,
        returns (test, test_support as support)
        """
        full_name = name_node.value + '.' + attr_node.value
        replacement = mapping[full_name]
        if '.' in replacement:
            new_name, new_attr = replacement.split('.')
            if renamed is None:
                # Keep the old attribute name visible: 'attr' becomes
                # 'new_attr as attr'.
                return Name(new_name, prefix=name_node.prefix), Node(syms.dotted_as_name, [Name(new_attr, prefix=attr_node.prefix), Name('as', prefix=" "), attr_node.clone()])
            else:
                # The import already has an 'as' clause; just rename parts.
                return Name(new_name, prefix=name_node.prefix), Name(new_attr, prefix=attr_node.prefix)
        else:
            # Whole submodule collapses to one module: 'replacement as attr'.
            return Node(syms.dotted_as_name, [Name(replacement, prefix=name_node.prefix), Name('as', prefix=' '), Name(attr_node.value, prefix=attr_node.prefix)]), None

    def transform(self, node, results):
        from_import = results.get("from_import")
        from_import_submod = results.get("from_import_submod")
        name_import = results.get("name_import")
        dotted_name = results.get("dotted_name")
        name = results.get("name")
        names = results.get("names")
        attr = results.get("attr")
        imported = results.get("imported")
        if names:
            # 'import a, b.c, d' -- fix each imported name in turn.
            for name in names:
                if name.type == token.NAME:
                    self.fix_simple_name(name)
                elif name.type == syms.dotted_as_name:
                    self.fix_simple_name(name.children[0]) if name.children[0].type == token.NAME else \
                        self.fix_dotted_name(name.children[0])
                elif name.type == syms.dotted_name:
                    self.fix_dotted_name(name)
        elif from_import_submod:
            renamed = results.get("renamed")
            new_name, new_attr = self.get_dotted_import_replacement(name, attr, renamed=renamed)
            if new_attr is not None:
                name.replace(new_name)
                attr.replace(new_attr)
            else:
                children = [Name("import"), new_name]
                node.replace(Node(syms.import_name, children, prefix=node.prefix))
        elif dotted_name:
            self.fix_dotted_name(dotted_name)
        elif name_import or from_import:
            self.fix_simple_name(name)
        elif name and not attr:
            # Only rename a bare usage when the replacement is already
            # imported AND the name is used like a builtin/module reference
            # (not e.g. a shadowing local).  This restores the use of the
            # imported-but-previously-unused is_probably_builtin helper,
            # matching the canonical fix_imports implementation.
            if does_tree_import(None, MAPPING[name.value], node) and \
                    is_probably_builtin(name):
                self.fix_simple_name(name)
        elif name and attr:
            # Note that this will fix a dotted name that was never imported. This will probably not matter.
            self.fix_dotted_name(node)
        elif imported and imported.type == syms.import_as_names:
            self.fix_submod_import(imported=imported.children, node=node, name=name.value)
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, BlankLine, find_binding, find_root
class FixIntern(fixer_base.BaseFix):
    """
    Rewrites sys.intern(x) calls to the Python 2 builtin intern(x) and
    removes 'intern' from 'from sys import ...' statements.
    """

    PATTERN = """
    power< 'sys' trailer < '.' 'intern' >
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any* >
    |
    import_from< 'from' 'sys' 'import'
           import_as_names< pre=any* binding='intern' post=any* > any* >
    |
    import_from< 'from' 'sys' 'import' simple='intern' >
    """

    def transform(self, node, results):
        # NOTE(review): PATTERN defines no 'name' group; this appears to
        # always be None and is never used below.
        name = results.get("name")
        binding = results.get("binding")
        pre = results.get("pre")
        post = results.get("post")
        simple = results.get("simple")
        if simple:
            # 'from sys import intern' -- remove the whole import binding.
            binding = find_binding("intern", find_root(node), "sys")
            binding.remove()
            return
        if binding:
            if not pre and not post:
                # 'intern' is the only name in the import list.
                new_binding = find_binding("intern", find_root(node), "sys")
                new_binding.remove()
                return
            elif not pre and post:
                # 'intern' is first: remove it and the comma after it.
                for ch in node.children:
                    if type(ch) == pytree.Node:
                        assert ch.children[0].prefix + "intern" \
                            == str(ch.children[0])
                        ch.children[0].remove() # intern
                        assert ch.children[0].prefix + "," \
                            == str(ch.children[0])
                        ch.children[0].remove() # ,
                return
            elif not post and pre:
                # 'intern' is last: remove it and the comma before it.
                for ch in node.children:
                    if type(ch) == pytree.Node:
                        assert ch.children[-1].prefix + "intern" \
                            == str(ch.children[-1])
                        ch.children[-1].remove() # intern
                        assert ch.children[-1].prefix + "," \
                            == str(ch.children[-1])
                        ch.children[-1].remove() # ,
                return
            elif post and pre:
                # 'intern' is in the middle: locate it by its rendered text
                # and drop it together with the preceding comma.
                for ch in node.children:
                    if type(ch) == pytree.Node:
                        for ch_ in ch.children:
                            if ch_ and ch_.prefix + "intern" == str(ch_):
                                last_ch_ = ch_.prev_sibling
                                ch_.remove() # intern
                                assert last_ch_.prefix + "," \
                                    == str(last_ch_)
                                last_ch_.remove() # ,
                return
        # Call form: rebuild 'sys.intern(obj)...' as 'intern(obj)...'.
        syms = self.syms
        obj = results["obj"].clone()
        if obj.type == syms.arglist:
            newarglist = obj.clone()
        else:
            newarglist = pytree.Node(syms.arglist, [obj.clone()])
        after = results["after"]
        if after:
            after = [n.clone() for n in after]
        new = pytree.Node(syms.power,
                          [Name("intern")] +
                          [pytree.Node(syms.trailer,
                                       [results["lpar"].clone(),
                                        newarglist,
                                        results["rpar"].clone()] + after)])
        new.prefix = node.prefix
        return new