sentence1: string, lengths 52 to 3.87M
sentence2: string, lengths 1 to 47.2k
label: string, 1 class
def click_field(self, move_x, move_y): """Click one grid by given position.""" field_status = self.info_map[move_y, move_x] # can only click blank region if field_status == 11: if self.mine_map[move_y, move_x] == 1: self.info_map[move_y, move_x] = 12 else: # discover the region. self.discover_region(move_x, move_y)
Click one grid by given position.
entailment
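The integer codes that click_field compares against are used by every board method below but are never spelled out in these snippets. The following constants are a sketch inferred from how info_map is read and written here; the names are hypothetical, only the values come from the code.

    # Inferred info_map cell codes; names are hypothetical, values follow the checks above.
    FLAGGED = 9        # set by flag_field
    QUESTIONED = 10    # set by question_field
    UNDISCOVERED = 11  # the only state click_field acts on
    EXPLODED = 12      # set by click_field when a mine is hit
    # Values 0-8 store the number of mines in the surrounding 3x3 window.

    def can_click(info_map, x, y):
        """A cell is clickable only while it is still undiscovered."""
        return info_map[y, x] == UNDISCOVERED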
def discover_region(self, move_x, move_y): """Discover region from given location.""" field_list = deque([(move_y, move_x)]) while len(field_list) != 0: field = field_list.popleft() (tl_idx, br_idx, region_sum) = self.get_region(field[1], field[0]) if region_sum == 0: self.info_map[field[0], field[1]] = region_sum # get surrounding to queue region_mat = self.info_map[tl_idx[0]:br_idx[0]+1, tl_idx[1]:br_idx[1]+1] x_list, y_list = np.nonzero(region_mat == 11) for x_idx, y_idx in zip(x_list, y_list): field_temp = (x_idx+max(field[0]-1, 0), y_idx+max(field[1]-1, 0)) if field_temp not in field_list: field_list.append(field_temp) elif region_sum > 0: self.info_map[field[0], field[1]] = region_sum
Discover region from given location.
entailment
def get_region(self, move_x, move_y): """Get region around a location.""" top_left = (max(move_y-1, 0), max(move_x-1, 0)) bottom_right = (min(move_y+1, self.board_height-1), min(move_x+1, self.board_width-1)) region_sum = self.mine_map[top_left[0]:bottom_right[0]+1, top_left[1]:bottom_right[1]+1].sum() return top_left, bottom_right, region_sum
Get region around a location.
entailment
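get_region clips the 3x3 window at the board edges before summing the mine map. A minimal standalone sketch of the same clipped-window sum, on a small hypothetical board:

    import numpy as np

    def clipped_window_sum(grid, x, y):
        """Sum the 3x3 neighbourhood around (x, y), clipped to the grid edges."""
        height, width = grid.shape
        top, left = max(y - 1, 0), max(x - 1, 0)
        bottom, right = min(y + 1, height - 1), min(x + 1, width - 1)
        return grid[top:bottom + 1, left:right + 1].sum()

    mine_map = np.array([[0, 1, 0],
                         [0, 0, 0],
                         [1, 0, 0]])
    print(clipped_window_sum(mine_map, 0, 0))  # 1: only the top-left 2x2 block is summed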
def flag_field(self, move_x, move_y): """Flag a grid by given position.""" field_status = self.info_map[move_y, move_x] # a questioned or undiscovered field if field_status != 9 and (field_status == 10 or field_status == 11): self.info_map[move_y, move_x] = 9
Flag a grid by given position.
entailment
def unflag_field(self, move_x, move_y): """Unflag or unquestion a grid by given position.""" field_status = self.info_map[move_y, move_x] if field_status == 9 or field_status == 10: self.info_map[move_y, move_x] = 11
Unflag or unquestion a grid by given position.
entailment
def question_field(self, move_x, move_y): """Question a grid by given position.""" field_status = self.info_map[move_y, move_x] # a questioned or undiscovered field if field_status != 10 and (field_status == 9 or field_status == 11): self.info_map[move_y, move_x] = 10
Question a grid by given position.
entailment
def check_board(self): """Check the board status and give feedback.""" num_mines = np.sum(self.info_map == 12) num_undiscovered = np.sum(self.info_map == 11) num_questioned = np.sum(self.info_map == 10) if num_mines > 0: return 0 elif np.array_equal(self.info_map == 9, self.mine_map): return 1 elif num_undiscovered > 0 or num_questioned > 0: return 2
Check the board status and give feedback.
entailment
def board_msg(self): """Structure a board as in print_board.""" board_str = "s\t\t" for i in xrange(self.board_width): board_str += str(i)+"\t" board_str = board_str.expandtabs(4)+"\n\n" for i in xrange(self.board_height): temp_line = str(i)+"\t\t" for j in xrange(self.board_width): if self.info_map[i, j] == 9: temp_line += "@\t" elif self.info_map[i, j] == 10: temp_line += "?\t" elif self.info_map[i, j] == 11: temp_line += "*\t" elif self.info_map[i, j] == 12: temp_line += "!\t" else: temp_line += str(self.info_map[i, j])+"\t" board_str += temp_line.expandtabs(4)+"\n" return board_str
Structure a board as in print_board.
entailment
def report_response(response, request_headers=True, request_body=True, response_headers=False, response_body=False, redirection=False): """ 生成响应报告 :param response: ``requests.models.Response`` 对象 :param request_headers: 是否加入请求头 :param request_body: 是否加入请求体 :param response_headers: 是否加入响应头 :param response_body: 是否加入响应体 :param redirection: 是否加入重定向响应 :return: str """ # https://docs.python.org/3/library/string.html#formatstrings url = 'Url: [{method}]{url} {status} {elapsed:.2f}ms'.format( method=response.request.method, url=response.url, status=response.status_code, elapsed=response.elapsed.total_seconds() * 1000 ) pieces = [url] if request_headers: request_headers = 'Request headers: {request_headers}'.format(request_headers=response.request.headers) pieces.append(request_headers) if request_body: request_body = 'Request body: {request_body}'.format(request_body=response.request.body) pieces.append(request_body) if response_headers: response_headers = 'Response headers: {response_headers}'.format(response_headers=response.headers) pieces.append(response_headers) if response_body: response_body = 'Response body: {response_body}'.format(response_body=response.text) pieces.append(response_body) reporter = '\n'.join(pieces) if redirection and response.history: for h in response.history[::-1]: redirect_reporter = report_response( h, request_headers, request_body, response_headers, response_body, redirection=False ) reporter = '\n'.join([redirect_reporter, ' Redirect ↓ '.center(72, '-'), reporter]) return reporter
Generate a response report :param response: a ``requests.models.Response`` object :param request_headers: whether to include the request headers :param request_body: whether to include the request body :param response_headers: whether to include the response headers :param response_body: whether to include the response body :param redirection: whether to include redirect responses :return: str
entailment
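A quick usage sketch for report_response, assuming the requests package is installed and report_response is importable from the module above; the boolean flags only control which pieces end up in the report string.

    import requests

    # Any URL works; this one just exercises the redirect branch.
    response = requests.get("https://httpbin.org/redirect/1", allow_redirects=True)
    print(report_response(
        response,
        request_headers=True,
        response_body=True,
        redirection=True,  # also report the hop recorded in response.history
    ))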
def init_ui(self): """Setup control widget UI.""" self.control_layout = QHBoxLayout() self.setLayout(self.control_layout) self.reset_button = QPushButton() self.reset_button.setFixedSize(40, 40) self.reset_button.setIcon(QtGui.QIcon(WIN_PATH)) self.game_timer = QLCDNumber() self.game_timer.setStyleSheet("QLCDNumber {color: red;}") self.game_timer.setFixedWidth(100) self.move_counter = QLCDNumber() self.move_counter.setStyleSheet("QLCDNumber {color: red;}") self.move_counter.setFixedWidth(100) self.control_layout.addWidget(self.game_timer) self.control_layout.addWidget(self.reset_button) self.control_layout.addWidget(self.move_counter)
Setup control widget UI.
entailment
def init_ui(self): """Init game interface.""" board_width = self.ms_game.board_width board_height = self.ms_game.board_height self.create_grid(board_width, board_height) self.time = 0 self.timer = QtCore.QTimer() self.timer.timeout.connect(self.timing_game) self.timer.start(1000)
Init game interface.
entailment
def create_grid(self, grid_width, grid_height): """Create a grid layout with stacked widgets. Parameters ---------- grid_width : int the width of the grid grid_height : int the height of the grid """ self.grid_layout = QGridLayout() self.setLayout(self.grid_layout) self.grid_layout.setSpacing(1) self.grid_wgs = {} for i in xrange(grid_height): for j in xrange(grid_width): self.grid_wgs[(i, j)] = FieldWidget() self.grid_layout.addWidget(self.grid_wgs[(i, j)], i, j)
Create a grid layout with stacked widgets. Parameters ---------- grid_width : int the width of the grid grid_height : int the height of the grid
entailment
def timing_game(self): """Timing game.""" self.ctrl_wg.game_timer.display(self.time) self.time += 1
Timing game.
entailment
def reset_game(self): """Reset game board.""" self.ms_game.reset_game() self.update_grid() self.time = 0 self.timer.start(1000)
Reset game board.
entailment
def update_grid(self): """Update grid according to info map.""" info_map = self.ms_game.get_info_map() for i in xrange(self.ms_game.board_height): for j in xrange(self.ms_game.board_width): self.grid_wgs[(i, j)].info_label(info_map[i, j]) self.ctrl_wg.move_counter.display(self.ms_game.num_moves) if self.ms_game.game_status == 2: self.ctrl_wg.reset_button.setIcon(QtGui.QIcon(CONTINUE_PATH)) elif self.ms_game.game_status == 1: self.ctrl_wg.reset_button.setIcon(QtGui.QIcon(WIN_PATH)) self.timer.stop() elif self.ms_game.game_status == 0: self.ctrl_wg.reset_button.setIcon(QtGui.QIcon(LOSE_PATH)) self.timer.stop()
Update grid according to info map.
entailment
def init_ui(self): """Init the ui.""" self.id = 11 self.setFixedSize(self.field_width, self.field_height) self.setPixmap(QtGui.QPixmap(EMPTY_PATH).scaled( self.field_width*3, self.field_height*3)) self.setStyleSheet("QLabel {background-color: blue;}")
Init the ui.
entailment
def mousePressEvent(self, event): """Define mouse press event.""" if event.button() == QtCore.Qt.LeftButton: # get label position p_wg = self.parent() p_layout = p_wg.layout() idx = p_layout.indexOf(self) loc = p_layout.getItemPosition(idx)[:2] if p_wg.ms_game.game_status == 2: p_wg.ms_game.play_move("click", loc[1], loc[0]) p_wg.update_grid() elif event.button() == QtCore.Qt.RightButton: p_wg = self.parent() p_layout = p_wg.layout() idx = p_layout.indexOf(self) loc = p_layout.getItemPosition(idx)[:2] if p_wg.ms_game.game_status == 2: if self.id == 9: self.info_label(10) p_wg.ms_game.play_move("question", loc[1], loc[0]) p_wg.update_grid() elif self.id == 11: self.info_label(9) p_wg.ms_game.play_move("flag", loc[1], loc[0]) p_wg.update_grid() elif self.id == 10: self.info_label(11) p_wg.ms_game.play_move("unflag", loc[1], loc[0]) p_wg.update_grid()
Define mouse press event.
entailment
def info_label(self, indicator): """Set info label by given settings. Parameters ---------- indicator : int A number where 0-8 is number of mines in srrounding. 12 is a mine field. """ if indicator in xrange(1, 9): self.id = indicator self.setPixmap(QtGui.QPixmap(NUMBER_PATHS[indicator]).scaled( self.field_width, self.field_height)) elif indicator == 0: self.id == 0 self.setPixmap(QtGui.QPixmap(NUMBER_PATHS[0]).scaled( self.field_width, self.field_height)) elif indicator == 12: self.id = 12 self.setPixmap(QtGui.QPixmap(BOOM_PATH).scaled(self.field_width, self.field_height)) self.setStyleSheet("QLabel {background-color: black;}") elif indicator == 9: self.id = 9 self.setPixmap(QtGui.QPixmap(FLAG_PATH).scaled(self.field_width, self.field_height)) self.setStyleSheet("QLabel {background-color: #A3C1DA;}") elif indicator == 10: self.id = 10 self.setPixmap(QtGui.QPixmap(QUESTION_PATH).scaled( self.field_width, self.field_height)) self.setStyleSheet("QLabel {background-color: yellow;}") elif indicator == 11: self.id = 11 self.setPixmap(QtGui.QPixmap(EMPTY_PATH).scaled( self.field_width*3, self.field_height*3)) self.setStyleSheet('QLabel {background-color: blue;}')
Set info label by given settings. Parameters ---------- indicator : int A number where 0-8 is the number of mines in the surrounding fields and 12 is a mine field.
entailment
def run(self): """Thread behavior.""" self.ms_game.tcp_accept() while True: data = self.ms_game.tcp_receive() if data == "help\n": self.ms_game.tcp_help() self.ms_game.tcp_send("> ") elif data == "exit\n": self.ms_game.tcp_close() elif data == "print\n": self.ms_game.tcp_send(self.ms_game.get_board()) self.ms_game.tcp_send("> ") elif data == "": self.ms_game.tcp_send("> ") else: self.transfer.emit(data) self.ms_game.tcp_send("> ") if self.ms_game.game_status == 1: self.ms_game.tcp_send("[MESSAGE] YOU WIN!\n") self.ms_game.tcp_close() elif self.ms_game.game_status == 0: self.ms_game.tcp_send("[MESSAGE] YOU LOSE!\n") self.ms_game.tcp_close()
Thread behavior.
entailment
def options(self, parser, env): """Register commandline options. """ parser.add_option( "--epdb", action="store_true", dest="epdb_debugErrors", default=env.get('NOSE_EPDB', False), help="Drop into extended debugger on errors") parser.add_option( "--epdb-failures", action="store_true", dest="epdb_debugFailures", default=env.get('NOSE_EPDB_FAILURES', False), help="Drop into extended debugger on failures")
Register commandline options.
entailment
def configure(self, options, conf): """Configure which kinds of exceptions trigger plugin. """ self.conf = conf self.enabled = options.epdb_debugErrors or options.epdb_debugFailures self.enabled_for_errors = options.epdb_debugErrors self.enabled_for_failures = options.epdb_debugFailures
Configure which kinds of exceptions trigger plugin.
entailment
def set_trace_cond(*args, **kw): """ Sets a condition for set_trace statements that have the specified marker. A condition can either callable, in which case it should take one argument, which is the number of times set_trace(marker) has been called, or it can be a number, in which case the break will only be called. """ for key, val in kw.items(): Epdb.set_trace_cond(key, val) for arg in args: Epdb.set_trace_cond(arg, True)
Sets a condition for set_trace statements that have the specified marker. A condition can either be a callable, in which case it should take one argument, which is the number of times set_trace(marker) has been called, or it can be a number, in which case the break is only triggered based on that call count.
entailment
def matchFileOnDirPath(curpath, pathdir): """Find match for a file by slicing away its directory elements from the front and replacing them with pathdir. Assume that the end of curpath is right and but that the beginning may contain some garbage (or it may be short) Overlaps are allowed: e.g /tmp/fdjsklf/real/path/elements, /all/the/real/ => /all/the/real/path/elements (assuming that this combined path exists) """ if os.path.exists(curpath): return curpath filedirs = curpath.split('/')[1:] filename = filedirs[-1] filedirs = filedirs[:-1] if pathdir[-1] == '/': pathdir = pathdir[:-1] # assume absolute paths pathdirs = pathdir.split('/')[1:] lp = len(pathdirs) # Cut off matching file elements from the ends of the two paths for x in range(1, min(len(filedirs), lp)): # XXX this will not work if you have # /usr/foo/foo/filename.py if filedirs[-1] == pathdirs[-x]: filedirs = filedirs[:-1] else: break # Now cut try cuting off incorrect initial elements of curpath while filedirs: tmppath = '/' + '/'.join(pathdirs + filedirs + [filename]) if os.path.exists(tmppath): return tmppath filedirs = filedirs[1:] tmppath = '/' + '/'.join(pathdirs + [filename]) if os.path.exists(tmppath): return tmppath return None
Find a match for a file by slicing away its directory elements from the front and replacing them with pathdir. Assume that the end of curpath is right but that the beginning may contain some garbage (or it may be short). Overlaps are allowed: e.g. /tmp/fdjsklf/real/path/elements, /all/the/real/ => /all/the/real/path/elements (assuming that this combined path exists)
entailment
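The docstring's own example is easiest to read as a call. This sketch uses hypothetical paths; matchFileOnDirPath only returns a candidate that passes os.path.exists and otherwise returns None.

    # Hypothetical paths: a result is returned only if the combined path exists on disk.
    fixed = matchFileOnDirPath('/tmp/fdjsklf/real/path/elements.py', '/all/the/real')
    if fixed is None:
        print('no combination of the two paths exists')
    else:
        print('resolved to', fixed)  # e.g. /all/the/real/path/elements.py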
def lookupmodule(self, filename): """Helper function for break/clear parsing -- may be overridden. lookupmodule() translates (possibly incomplete) file or module name into an absolute file name. """ if os.path.isabs(filename) and os.path.exists(filename): return filename f = os.path.join(sys.path[0], filename) if os.path.exists(f) and self.canonic(f) == self.mainpyfile: return f root, ext = os.path.splitext(filename) origFileName = filename if ext == '': filename = filename + '.py' if os.path.isabs(filename): return filename for dirname in sys.path: while os.path.islink(dirname): dirname = os.path.realpath(os.path.join( os.path.dirname(dirname), os.readlink(dirname))) fullname = os.path.join(dirname, filename) if os.path.exists(fullname): return fullname if origFileName in sys.modules: return sys.modules[origFileName].__file__ return None
Helper function for break/clear parsing -- may be overridden. lookupmodule() translates (possibly incomplete) file or module name into an absolute file name.
entailment
def set_trace_cond(klass, marker='default', cond=None): """ Sets a condition for set_trace statements that have the specified marker. A condition can be either callable, in which case it should take one argument, which is the number of times set_trace(marker) has been called, or it can be a number, in which case the break will only be called. """ tc = klass.trace_counts tc[marker] = [cond, 0]
Sets a condition for set_trace statements that have the specified marker. A condition can be either a callable, in which case it should take one argument, which is the number of times set_trace(marker) has been called, or it can be a number, in which case the break is only triggered based on that call count.
entailment
def _set_trace(self, skip=0): """Start debugging from here.""" frame = sys._getframe().f_back # go up the specified number of frames for i in range(skip): frame = frame.f_back self.reset() while frame: frame.f_trace = self.trace_dispatch self.botframe = frame frame = frame.f_back self.set_step() sys.settrace(self.trace_dispatch)
Start debugging from here.
entailment
def user_call(self, frame, argument_list): """This method is called when there is the remote possibility that we ever need to stop in this function.""" if self.stop_here(frame): pdb.Pdb.user_call(self, frame, argument_list)
This method is called when there is the remote possibility that we ever need to stop in this function.
entailment
def user_return(self, frame, return_value): """This function is called when a return trap is set here.""" pdb.Pdb.user_return(self, frame, return_value)
This function is called when a return trap is set here.
entailment
def user_exception(self, frame, exc_info): """This function is called if an exception occurs, but only if we are to stop at or just below this level.""" pdb.Pdb.user_exception(self, frame, exc_info)
This function is called if an exception occurs, but only if we are to stop at or just below this level.
entailment
def stackToList(stack): """ Convert a chain of traceback or frame objects into a list of frames. """ if isinstance(stack, types.TracebackType): while stack.tb_next: stack = stack.tb_next stack = stack.tb_frame out = [] while stack: out.append(stack) stack = stack.f_back return out
Convert a chain of traceback or frame objects into a list of frames.
entailment
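A short usage sketch for stackToList, assuming the function above is in scope: when handed a traceback it walks to the innermost frame first and then follows f_back outward, so the raising function appears at index 0.

    import sys

    def failing():
        raise RuntimeError("boom")

    try:
        failing()
    except RuntimeError:
        tb = sys.exc_info()[2]
        frames = stackToList(tb)
        # innermost frame first, then each caller outward
        print([frame.f_code.co_name for frame in frames])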
def process_IAC(self, sock, cmd, option): """ Read in and parse IAC commands as passed by telnetlib. SB/SE commands are stored in sbdataq, and passed in w/ a command of SE. """ if cmd == DO: if option == TM: # timing mark - send WILL into outgoing stream os.write(self.remote, IAC + WILL + TM) else: pass elif cmd == IP: # interrupt process os.write(self.local, IPRESP) elif cmd == SB: pass elif cmd == SE: option = self.sbdataq[0] if option == NAWS[0]: # negotiate window size. cols = six.indexbytes(self.sbdataq, 1) rows = six.indexbytes(self.sbdataq, 2) s = struct.pack('HHHH', rows, cols, 0, 0) fcntl.ioctl(self.local, termios.TIOCSWINSZ, s) elif cmd == DONT: pass else: pass
Read in and parse IAC commands as passed by telnetlib. SB/SE commands are stored in sbdataq, and passed in w/ a command of SE.
entailment
def handle(self): """ Performs endless processing of socket input/output, passing cooked information onto the local process. """ while True: toRead = select.select([self.local, self.remote], [], [], 0.1)[0] if self.local in toRead: data = os.read(self.local, 4096) self.sock.sendall(data) continue if self.remote in toRead or self.rawq: buf = self.read_eager() os.write(self.local, buf) continue
Performs endless processing of socket input/output, passing cooked information onto the local process.
entailment
def handle(self): """ Creates a child process that is fully controlled by this request handler, and serves data to and from it via the protocol handler. """ pid, fd = pty.fork() if pid: protocol = TelnetServerProtocolHandler(self.request, fd) protocol.handle() else: self.execute()
Creates a child process that is fully controlled by this request handler, and serves data to and from it via the protocol handler.
entailment
def handle_request(self): """ Handle one request - serve current process to one connection. Use close_request() to disconnect this process. """ try: request, client_address = self.get_request() except socket.error: return if self.verify_request(request, client_address): try: # we only serve once, and we want to free up the port # for future serves. self.socket.close() self.process_request(request, client_address) except SocketConnected as err: self._serve_process(err.slaveFd, err.serverPid) return except Exception as err: self.handle_error(request, client_address) self.close_request()
Handle one request - serve current process to one connection. Use close_request() to disconnect this process.
entailment
def _serve_process(self, slaveFd, serverPid): """ Serves a process by connecting its outputs/inputs to the pty slaveFd. serverPid is the process controlling the master fd that passes that output over the socket. """ self.serverPid = serverPid if sys.stdin.isatty(): self.oldTermios = termios.tcgetattr(sys.stdin.fileno()) else: self.oldTermios = None self.oldStderr = SavedFile(2, sys, 'stderr') self.oldStdout = SavedFile(1, sys, 'stdout') self.oldStdin = SavedFile(0, sys, 'stdin') self.oldStderr.save(slaveFd, mode="w") self.oldStdout.save(slaveFd, mode="w") self.oldStdin.save(slaveFd, mode="r") os.close(slaveFd) self.closed = False
Serves a process by connecting its outputs/inputs to the pty slaveFd. serverPid is the process controlling the master fd that passes that output over the socket.
entailment
def int_input(message, low, high, show_range = True): ''' Ask a user for a int input between two values args: message (str): Prompt for user low (int): Low value, user entered value must be > this value to be accepted high (int): High value, user entered value must be < this value to be accepted show_range (boolean, Default True): Print hint to user the range returns: int_in (int): Input integer ''' int_in = low - 1 while (int_in < low) or (int_in > high): if show_range: suffix = ' (integer between ' + str(low) + ' and ' + str(high) + ')' else: suffix = '' inp = input('Enter a ' + message + suffix + ': ') if re.match('^-?[0-9]+$', inp) is not None: int_in = int(inp) else: print(colored('Must be an integer, try again!', 'red')) return int_in
Ask a user for an int input between two values args: message (str): Prompt for user low (int): Low value, user entered value must be >= this value to be accepted high (int): High value, user entered value must be <= this value to be accepted show_range (boolean, Default True): Print a hint to the user showing the range returns: int_in (int): Input integer
entailment
def float_input(message, low, high): ''' Ask a user for a float input between two values args: message (str): Prompt for user low (float): Low value, user entered value must be > this value to be accepted high (float): High value, user entered value must be < this value to be accepted returns: float_in (int): Input float ''' float_in = low - 1.0 while (float_in < low) or (float_in > high): inp = input('Enter a ' + message + ' (float between ' + str(low) + ' and ' + str(high) + '): ') if re.match('^([0-9]*[.])?[0-9]+$', inp) is not None: float_in = float(inp) else: print(colored('Must be a float, try again!', 'red')) return float_in
Ask a user for a float input between two values args: message (str): Prompt for user low (float): Low value, user entered value must be >= this value to be accepted high (float): High value, user entered value must be <= this value to be accepted returns: float_in (float): Input float
entailment
def bool_input(message): ''' Ask a user for a boolean input args: message (str): Prompt for user returns: bool_in (boolean): Input boolean ''' while True: suffix = ' (true or false): ' inp = input(message + suffix) if inp.lower() == 'true': return True elif inp.lower() == 'false': return False else: print(colored('Must be either true or false, try again!', 'red'))
Ask a user for a boolean input args: message (str): Prompt for user returns: bool_in (boolean): Input boolean
entailment
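The three prompt helpers above are combined in the configure flow further down; a short sketch of the prompts they produce (the messages and bounds are taken from that flow, the comments just show the resulting prompt text):

    kfold = int_input('number of folds to use (suggested: 5)', 3, 10)
    # prompt: Enter a number of folds to use (suggested: 5) (integer between 3 and 10):
    learning_rate = float_input('learning rate (suggested: 0.001)', 0, 1)
    # prompt: Enter a learning rate (suggested: 0.001) (float between 0 and 1):
    plot_cm = bool_input('Plot a confusion matrix after training?')
    # prompt: Plot a confusion matrix after training? (true or false):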
def main(args = None): ''' Main entry point for transfer command line tool. This essentially will marshall the user to the functions they need. ''' parser = argparse.ArgumentParser(description = 'Tool to perform transfer learning') parser.add_argument('-c','--configure', action = 'store_true', help = 'Configure transfer') parser.add_argument('-e','--export', action = 'store_true', dest = 'export_config', help = 'Export configuration and models') parser.add_argument('-i','--import', action = 'store', type = str, default = None, dest = 'import_config', help = 'Import configuration and models') parser.add_argument('-p','--project', action = 'store', type = str, default = None, dest = 'project', help = 'Specify a project, if not supplied it will be picked from configured projects') parser.add_argument('-r','--run', action = 'store_true', help = 'Run all transfer learning operations') parser.add_argument('-f','--final', action = 'store_true', help = 'Run final training on all layers: Warning SLOW!') parser.add_argument('-l','--last-weights', action = 'store_true', dest = 'last', help = 'Restart from the last weights, rather than the best intermediate weights') parser.add_argument('--predict', action = 'store', type = str, default = None, const = 'default', dest = 'predict', nargs='?', help = 'Predict model on file or directory') parser.add_argument('--prediction-rest-api', action = 'store_true', dest = 'rest_api', help = 'Start rest api to make predictions on files or directories') if len(sys.argv) == 1: parser.print_help() return args = parser.parse_args() if args.import_config is not None: import_config(args.import_config) return elif args.export_config: project = select_project(args.project) weights = model_input(project) ind = model_individual_input(project, weights) export_config(project, weights, ind) return elif args.configure: configure() return else: project = select_project(args.project) if args.run: if project['is_array'] == False: project = images_to_array(project) update_config(project) if project['is_augmented'] == False: project = augment_arrays(project) update_config(project) if project['is_pre_model'] == False: project = pre_model(project) update_config(project) project = train_model(project, final = args.final, last = args.last) update_config(project) print('') print(colored('Completed modeling round: ' + str(project['model_round']), 'cyan')) print('') print('Best current model: ', colored(project['best_weights'], 'yellow')) print('Last current model: ', colored(project['last_weights'], 'yellow')) print('') print('To further refine the model, run again with:') print('') print(colored(' transfer --run --project ' + project['name'], 'green')) print('') print('To make predictions:') print('') print(colored(' transfer --predict [optional dir or file] --project ' + project['name'], 'yellow')) print('') elif args.rest_api: if project['server_weights'] is not None: start_server(project, 'server_weights') elif project['best_weights'] is not None: weights = model_input(project) start_server(project, weights) else: print('Model is not trained. Please first run your project:') print('') print(colored(' transfer --run', 'green')) print('') elif args.predict is not None: if args.predict == 'default': completer = Completer() readline.set_completer_delims('\t') readline.parse_and_bind('tab: complete') readline.set_completer(completer.path_completer) args.predict = str_input('Enter a path to file(s): ') if project['server_weights'] is not None: predict_model(project, 'server_weights', args.predict) elif project['best_weights'] is not None: weights = model_input(project) print('Predicting on image(s) in: ', colored(args.predict, 'yellow')) predict_model(project, weights, args.predict) else: print('Model is not trained. Please first run your project:') print('') print(colored(' transfer --run', 'green')) print('')
Main entry point for transfer command line tool. This essentially will marshall the user to the functions they need.
entailment
def configure(): ''' Configure the transfer environment and store ''' completer = Completer() readline.set_completer_delims('\t') readline.parse_and_bind('tab: complete') readline.set_completer(completer.path_completer) home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: config = yaml.load(fp.read()) else: config = [] project_name = input('Name your project: ') existing_project = None for project in config: if project_name == project['name']: existing_project = project_name if existing_project is not None: print(colored('Project ' + project_name + ' already exists', 'red')) overwrite = str_input('Would you like to overwrite this project? (yes or no) ', ['yes', 'no']) if overwrite == 'no': return else: config = [project for project in config if project_name != project['name']] image_path = os.path.expanduser(input('Select parent directory for your images: ')) path_unset = True while path_unset: project_path = os.path.expanduser(input('Select destination for your project: ')) if (project_path.find(image_path) == 0): print('Project destination should not be same or within image directory!') else: path_unset = False print('Select architecture:') print('[0] resnet50') print('[1] xception') print('[2] inception_v3') architecture = int_input('choice', 0, 2, show_range = False) if architecture == 0: arch = 'resnet50' img_dim = 224 conv_dim = 7 final_cutoff = 80 elif architecture == 1: arch = 'xception' img_dim = 299 conv_dim = 10 final_cutoff = 80 else: arch = 'inception_v3' img_dim = 299 conv_dim = 8 final_cutoff = 80 api_port = int_input('port for local prediction API (suggested: 5000)', 1024, 49151) kfold = int_input('number of folds to use (suggested: 5)', 3, 10) kfold_every = bool_input('Fit a model for every fold? (if false, just fit one)') print('Warning: if working on a remote computer, you may not be able to plot!') plot_cm = bool_input('Plot a confusion matrix after training?') batch_size = int_input('batch size (suggested: 8)', 1, 64) learning_rate = float_input('learning rate (suggested: 0.001)', 0, 1) learning_rate_decay = float_input('learning decay rate (suggested: 0.000001)', 0, 1) cycle = int_input('number of cycles before resetting the learning rate (suggested: 3)', 1, 10) num_rounds = int_input('number of rounds (suggested: 3)', 1, 100) print('Select image resolution:') print('[0] low (' + str(img_dim) + ' px)') print('[1] mid (' + str(img_dim * 2) + ' px)') print('[2] high (' + str(img_dim * 4) + ' px)') img_resolution_index = int_input('choice', 0, 2, show_range = False) if img_resolution_index == 0: img_size = 1 elif img_resolution_index == 1: img_size = 2 else: img_size = 4 use_augmentation = str_input('Would you like to add image augmentation? (yes or no) ', ['yes', 'no']) if use_augmentation == 'yes': augmentations = select_augmentations() else: augmentations = None project = {'name': project_name, 'img_path': image_path, 'path': project_path, 'plot': plot_cm, 'api_port': api_port, 'kfold': kfold, 'kfold_every': kfold_every, 'cycle': cycle, 'seed': np.random.randint(9999), 'batch_size': batch_size, 'learning_rate': learning_rate, 'learning_rate_decay': learning_rate_decay, 'final_cutoff': final_cutoff, 'rounds': num_rounds, 'img_size': img_size, 'augmentations': augmentations, 'architecture': arch, 'img_dim': img_dim, 'conv_dim': conv_dim, 'is_split': False, 'is_array': False, 'is_augmented': False, 'is_pre_model': False, 'is_final': False, 'model_round': 0, 'server_weights': None, 'last_weights': None, 'best_weights': None} config.append(project) store_config(config) print('') print(colored('Project configure saved!', 'cyan')) print('') print('To run project:') print('') print(colored(' transfer --run --project ' + project_name, 'green')) print('or') print(colored(' transfer -r -p ' + project_name, 'green'))
Configure the transfer environment and store the configuration
entailment
def configure_server(): ''' Configure the transfer environment and store ''' home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: config = yaml.load(fp.read()) else: config = [] project_name = input('Name your project: ') existing_project = None for project in config: if project_name == project['name']: existing_project = project_name if existing_project is not None: print(colored('Project ' + project_name + ' already exists', 'red')) overwrite = str_input('Would you like to overwrite this project? (yes or no) ', ['yes', 'no']) if overwrite == 'no': return else: config = [project for project in config if project_name != project['name']] api_port = int_input('port for local prediction API (suggested: 5000)', 1024, 49151) print('Select image resolution:') print('[0] low (224 px)') print('[1] mid (448 px)') print('[2] high (896 px)') img_resolution_index = int_input('choice', 0, 2, show_range = False) if img_resolution_index == 0: img_size = 1 elif img_resolution_index == 1: img_size = 2 else: img_size = 4 num_categories = int_input('number of image categories in your model', 0, 10000000) weights = False while weights == False: server_weights = os.path.expanduser(input('Select weights file: ')) if os.path.isfile(server_weights): weights = True else: print('Cannot find the weight file: ', server_weights) project = {'name': project_name, 'api_port': api_port, 'img_size': img_size, 'number_categories': num_categories, 'server_weights': server_weights} config.append(project) store_config(config) print('') print(colored('Project configure saved!', 'cyan')) print('') print('To start the server:') print('') print(colored(' transfer --prediction-rest-api --project ' + project_name, 'green')) print('or') print(colored(' transfer --prediction-rest-api -p ' + project_name, 'green'))
Configure the transfer environment and store the configuration
entailment
def select_project(user_provided_project): ''' Select a project from configuration to run transfer on args: user_provided_project (str): Project name that should match a project in the config returns: project (dict): Configuration settings for a user selected project ''' home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: projects = yaml.load(fp.read()) if len(projects) == 1: project = projects[0] else: if user_provided_project in [project['name'] for project in projects]: for inner_project in projects: if user_provided_project == inner_project['name']: project = inner_project else: print('Select your project') for i, project in enumerate(projects): print('[' + str(i) + ']: ' + project['name']) project_index = int_input('project', -1, len(projects), show_range = False) project = projects[project_index] else: print('Transfer is not configured.') print('Please run:') print('') print(colored(' transfer --configure', 'green')) return print(colored('Project selected: ' + project['name'], 'cyan')) return project
Select a project from configuration to run transfer on args: user_provided_project (str): Project name that should match a project in the config returns: project (dict): Configuration settings for a user selected project
entailment
def store_config(config, suffix = None): ''' Store configuration args: config (list[dict]): configurations for each project ''' home = os.path.expanduser('~') if suffix is not None: config_path = os.path.join(home, '.transfer', suffix) else: config_path = os.path.join(home, '.transfer') os.makedirs(config_path, exist_ok = True) with open(os.path.join(config_path, 'config.yaml'), 'w') as fp: yaml.dump(config, fp)
Store configuration args: config (list[dict]): configurations for each project
entailment
def update_config(updated_project): ''' Update project in configuration args: updated_project (dict): Updated project configuration values ''' home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: projects = yaml.load(fp.read()) replace_index = -1 for i, project in enumerate(projects): if project['name'] == updated_project['name']: replace_index = i if replace_index > -1: projects[replace_index] = updated_project store_config(projects) else: print('Not saving configuration') print(colored('Project: ' + updated_project['name'] + ' was not found in configured projects!', 'red')) else: print('Transfer is not configured.') print('Please run:') print('') print(colored(' transfer --configure', 'cyan')) return
Update project in configuration args: updated_project (dict): Updated project configuration values
entailment
def atom_criteria(*params): """An auxiliary function to construct a dictionary of Criteria""" result = {} for index, param in enumerate(params): if param is None: continue elif isinstance(param, int): result[index] = HasAtomNumber(param) else: result[index] = param return result
An auxiliary function to construct a dictionary of Criteria
entailment
def _check_symbols(self, symbols): """the size must be the same as the length of the array numbers and all elements must be strings""" if len(symbols) != self.size: raise TypeError("The number of symbols in the graph does not " "match the length of the atomic numbers array.") for symbol in symbols: if not isinstance(symbol, str): raise TypeError("All symbols must be strings.")
the size must be the same as the length of the array numbers and all elements must be strings
entailment
def from_geometry(cls, molecule, do_orders=False, scaling=1.0): """Construct a MolecularGraph object based on interatomic distances All short distances are computed with the binning module and compared with a database of bond lengths. Based on this comparison, bonded atoms are detected. Before marking a pair of atoms A and B as bonded, it is also checked that there is no third atom C somewhat between A and B. When an atom C exists that is closer to B (than A) and the angle A-B-C is less than 45 degrees, atoms A and B are not bonded. Similarly if C is closer to A (than B) and the angle B-A-C is less then 45 degrees, A and B are not connected. Argument: | ``molecule`` -- The molecule to derive the graph from Optional arguments: | ``do_orders`` -- set to True to estimate the bond order | ``scaling`` -- scale the threshold for the connectivity. increase this to 1.5 in case of transition states when a fully connected topology is required. """ from molmod.bonds import bonds unit_cell = molecule.unit_cell pair_search = PairSearchIntra( molecule.coordinates, bonds.max_length*bonds.bond_tolerance*scaling, unit_cell ) orders = [] lengths = [] edges = [] for i0, i1, delta, distance in pair_search: bond_order = bonds.bonded(molecule.numbers[i0], molecule.numbers[i1], distance/scaling) if bond_order is not None: if do_orders: orders.append(bond_order) lengths.append(distance) edges.append((i0,i1)) if do_orders: result = cls(edges, molecule.numbers, orders, symbols=molecule.symbols) else: result = cls(edges, molecule.numbers, symbols=molecule.symbols) # run a check on all neighbors. if two bonds point in a direction that # differs only by 45 deg. the longest of the two is discarded. the # double loop over the neighbors is done such that the longest bonds # are eliminated first slated_for_removal = set([]) threshold = 0.5**0.5 for c, ns in result.neighbors.items(): lengths_ns = [] for n in ns: delta = molecule.coordinates[n] - molecule.coordinates[c] if unit_cell is not None: delta = unit_cell.shortest_vector(delta) length = np.linalg.norm(delta) lengths_ns.append([length, delta, n]) lengths_ns.sort(reverse=True, key=(lambda r: r[0])) for i0, (length0, delta0, n0) in enumerate(lengths_ns): for i1, (length1, delta1, n1) in enumerate(lengths_ns[:i0]): if length1 == 0.0: continue cosine = np.dot(delta0, delta1)/length0/length1 if cosine > threshold: # length1 > length0 slated_for_removal.add((c,n1)) lengths_ns[i1][0] = 0.0 # construct a mask mask = np.ones(len(edges), bool) for i0, i1 in slated_for_removal: edge_index = result.edge_index.get(frozenset([i0,i1])) if edge_index is None: raise ValueError('Could not find edge that has to be removed: %i %i' % (i0, i1)) mask[edge_index] = False # actual removal edges = [edges[i] for i in range(len(edges)) if mask[i]] if do_orders: bond_order = [bond_order[i] for i in range(len(bond_order)) if mask[i]] result = cls(edges, molecule.numbers, orders) else: result = cls(edges, molecule.numbers) lengths = [lengths[i] for i in range(len(lengths)) if mask[i]] result.bond_lengths = np.array(lengths) return result
Construct a MolecularGraph object based on interatomic distances All short distances are computed with the binning module and compared with a database of bond lengths. Based on this comparison, bonded atoms are detected. Before marking a pair of atoms A and B as bonded, it is also checked that there is no third atom C somewhat between A and B. When an atom C exists that is closer to B (than A) and the angle A-B-C is less than 45 degrees, atoms A and B are not bonded. Similarly, if C is closer to A (than B) and the angle B-A-C is less than 45 degrees, A and B are not connected. Argument: | ``molecule`` -- The molecule to derive the graph from Optional arguments: | ``do_orders`` -- set to True to estimate the bond order | ``scaling`` -- scale the threshold for the connectivity. Increase this to 1.5 in case of transition states when a fully connected topology is required.
entailment
def from_blob(cls, s): """Construct a molecular graph from the blob representation""" atom_str, edge_str = s.split() numbers = np.array([int(s) for s in atom_str.split(",")]) edges = [] orders = [] for s in edge_str.split(","): i, j, o = (int(w) for w in s.split("_")) edges.append((i, j)) orders.append(o) return cls(edges, numbers, np.array(orders))
Construct a molecular graph from the blob representation
entailment
def blob(self): """A compact text representation of the graph""" atom_str = ",".join(str(number) for number in self.numbers) edge_str = ",".join("%i_%i_%i" % (i, j, o) for (i, j), o in zip(self.edges, self.orders)) return "%s %s" % (atom_str, edge_str)
A compact text representation of the graph
entailment
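blob and from_blob round-trip a compact text form: comma-separated atomic numbers, a space, then comma-separated i_j_o triples for the edges and their orders. A hypothetical water-like graph makes the format concrete; this assumes MolecularGraph from the snippets above is importable.

    # Hypothetical blob text: atomic numbers 8,1,1 and two single bonds from atom 0.
    blob_text = "8,1,1 0_1_1,0_2_1"
    graph = MolecularGraph.from_blob(blob_text)
    print(graph.numbers)   # [8 1 1]
    print(graph.blob())    # an equivalent blob string for the same graph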
def get_vertex_string(self, i): """Return a string based on the atom number""" number = self.numbers[i] if number == 0: return Graph.get_vertex_string(self, i) else: # pad with zeros to make sure that string sort is identical to number sort return "%03i" % number
Return a string based on the atom number
entailment
def get_edge_string(self, i): """Return a string based on the bond order""" order = self.orders[i] if order == 0: return Graph.get_edge_string(self, i) else: # pad with zeros to make sure that string sort is identical to number sort return "%03i" % order
Return a string based on the bond order
entailment
def get_subgraph(self, subvertices, normalize=False): """Creates a subgraph of the current graph See :meth:`molmod.graphs.Graph.get_subgraph` for more information. """ graph = Graph.get_subgraph(self, subvertices, normalize) if normalize: new_numbers = self.numbers[graph._old_vertex_indexes] # vertices do change else: new_numbers = self.numbers # vertices don't change! if self.symbols is None: new_symbols = None elif normalize: new_symbols = tuple(self.symbols[i] for i in graph._old_vertex_indexes) else: new_symbols = self.symbols new_orders = self.orders[graph._old_edge_indexes] result = MolecularGraph(graph.edges, new_numbers, new_orders, new_symbols) if normalize: result._old_vertex_indexes = graph._old_vertex_indexes result._old_edge_indexes = graph._old_edge_indexes return result
Creates a subgraph of the current graph See :meth:`molmod.graphs.Graph.get_subgraph` for more information.
entailment
def add_hydrogens(self, formal_charges=None): """Returns a molecular graph where hydrogens are added explicitely When the bond order is unknown, it assumes bond order one. If the graph has an attribute formal_charges, this routine will take it into account when counting the number of hydrogens to be added. The returned graph will also have a formal_charges attribute. This routine only adds hydrogen atoms for a limited set of atoms from the periodic system: B, C, N, O, F, Al, Si, P, S, Cl, Br. """ new_edges = list(self.edges) counter = self.num_vertices for i in range(self.num_vertices): num_elec = self.numbers[i] if formal_charges is not None: num_elec -= int(formal_charges[i]) if num_elec >= 5 and num_elec <= 9: num_hydrogen = num_elec - 10 + 8 elif num_elec >= 13 and num_elec <= 17: num_hydrogen = num_elec - 18 + 8 elif num_elec == 35: num_hydrogen = 1 else: continue if num_hydrogen > 4: num_hydrogen = 8 - num_hydrogen for n in self.neighbors[i]: bo = self.orders[self.edge_index[frozenset([i, n])]] if bo <= 0: bo = 1 num_hydrogen -= int(bo) for j in range(num_hydrogen): new_edges.append((i, counter)) counter += 1 new_numbers = np.zeros(counter, int) new_numbers[:self.num_vertices] = self.numbers new_numbers[self.num_vertices:] = 1 new_orders = np.zeros(len(new_edges), int) new_orders[:self.num_edges] = self.orders new_orders[self.num_edges:] = 1 result = MolecularGraph(new_edges, new_numbers, new_orders) return result
Returns a molecular graph where hydrogens are added explicitly. When the bond order is unknown, it assumes bond order one. If the graph has an attribute formal_charges, this routine will take it into account when counting the number of hydrogens to be added. The returned graph will also have a formal_charges attribute. This routine only adds hydrogen atoms for a limited set of atoms from the periodic system: B, C, N, O, F, Al, Si, P, S, Cl, Br.
entailment
def check_next_match(self, match, new_relations, subject_graph, one_match): """Check if the (onset for a) match can be a valid (part of a) ring""" if not CustomPattern.check_next_match(self, match, new_relations, subject_graph, one_match): return False if self.strong: # can this ever become a strong ring? vertex1_start = match.forward[self.pattern_graph.central_vertex] for vertex1 in new_relations.values(): paths = list(subject_graph.iter_shortest_paths(vertex1, vertex1_start)) if self.size % 2 == 0 and len(match) == self.size: if len(paths) != 2: #print "NRingPattern.check_next_match: not strong a.1" return False for path in paths: if len(path) != len(match)//2+1: #print "NRingPattern.check_next_match: not strong a.2" return False else: if len(paths) != 1: #print "NRingPattern.check_next_match: not strong b.1" return False if len(paths[0]) != (len(match)+1)//2: #print "NRingPattern.check_next_match: not strong b.2" return False #print "RingPattern.check_next_match: no remarks" return True
Check if the (onset for a) match can be a valid (part of a) ring
entailment
def complete(self, match, subject_graph): """Check the completeness of the ring match""" if not CustomPattern.complete(self, match, subject_graph): return False if self.strong: # If the ring is not strong, return False if self.size % 2 == 0: # even ring for i in range(self.size//2): vertex1_start = match.forward[i] vertex1_stop = match.forward[(i+self.size//2)%self.size] paths = list(subject_graph.iter_shortest_paths(vertex1_start, vertex1_stop)) if len(paths) != 2: #print "Even ring must have two paths between opposite vertices" return False for path in paths: if len(path) != self.size//2+1: #print "Paths between opposite vertices must half the size of the ring+1" return False else: # odd ring for i in range(self.size//2+1): vertex1_start = match.forward[i] vertex1_stop = match.forward[(i+self.size//2)%self.size] paths = list(subject_graph.iter_shortest_paths(vertex1_start, vertex1_stop)) if len(paths) > 1: return False if len(paths[0]) != self.size//2+1: return False vertex1_stop = match.forward[(i+self.size//2+1)%self.size] paths = list(subject_graph.iter_shortest_paths(vertex1_start, vertex1_stop)) if len(paths) > 1: return False if len(paths[0]) != self.size//2+1: return False return True
Check the completeness of the ring match
entailment
def get_kind(self, value): """Return the kind (type) of the attribute""" if isinstance(value, float): return 'f' elif isinstance(value, int): return 'i' else: raise ValueError("Only integer or floating point values can be stored.")
Return the kind (type) of the attribute
entailment
def dump(self, f, name): """Write the attribute to a file-like object""" # print the header line value = self.get() kind = self.get_kind(value) print("% 40s kind=%s value=%s" % (name, kind, value), file=f)
Write the attribute to a file-like object
entailment
def get(self, copy=False): """Return the value of the attribute""" array = getattr(self.owner, self.name) if copy: return array.copy() else: return array
Return the value of the attribute
entailment
def dump(self, f, name): """Write the attribute to a file-like object""" array = self.get() # print the header line print("% 40s kind=%s shape=(%s)" % ( name, array.dtype.kind, ",".join([str(int(size_axis)) for size_axis in array.shape]), ), file=f) # print the numbers counter = 0 for value in array.flat: counter += 1 print("% 20s" % value, end=' ', file=f) if counter % 4 == 0: print(file=f) if counter % 4 != 0: print(file=f)
Write the attribute to a file-like object
entailment
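The two dump methods above define a small text format: a 40-character right-aligned field name, then either value=... for scalars or shape=(...) followed by the flattened array, four values per line in 20-character columns. A minimal sketch that reproduces those header lines with hypothetical names and values, using the same format strings as the code:

    # Hypothetical field names and values; only the format strings mirror the code above.
    print("% 40s kind=%s value=%s" % ("temperature", "f", 300.0))
    print("% 40s kind=%s shape=(%s)" % ("numbers", "i", "3"))
    for value in (8, 1, 1):
        print("% 20s" % value, end=' ')
    print()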
def load(self, f, skip): """Load the array data from a file-like object""" array = self.get() counter = 0 counter_limit = array.size convert = array.dtype.type while counter < counter_limit: line = f.readline() words = line.split() for word in words: if counter >= counter_limit: raise FileFormatError("Wrong array data: too many values.") if not skip: array.flat[counter] = convert(word) counter += 1
Load the array data from a file-like object
entailment
def _register(self, name, AttrCls): """Register a new attribute to take care of with dump and load Arguments: | ``name`` -- the name to be used in the dump file | ``AttrCls`` -- an attr class describing the attribute """ if not issubclass(AttrCls, StateAttr): raise TypeError("The second argument must a StateAttr instance.") if len(name) > 40: raise ValueError("Name can count at most 40 characters.") self._fields[name] = AttrCls(self._owner, name)
Register a new attribute to take care of with dump and load Arguments: | ``name`` -- the name to be used in the dump file | ``AttrCls`` -- an attr class describing the attribute
entailment
def get(self, subset=None): """Return a dictionary object with the registered fields and their values Optional rgument: | ``subset`` -- a list of names to restrict the number of fields in the result """ if subset is None: return dict((name, attr.get(copy=True)) for name, attr in self._fields.items()) else: return dict((name, attr.get(copy=True)) for name, attr in self._fields.items() if name in subset)
Return a dictionary object with the registered fields and their values Optional argument: | ``subset`` -- a list of names to restrict the number of fields in the result
entailment
def set(self, new_fields, subset=None): """Assign the registered fields based on a dictionary Argument: | ``new_fields`` -- the dictionary with the data to be assigned to the attributes Optional argument: | ``subset`` -- a list of names to restrict the fields that are effectively overwritten """ for name in new_fields: if name not in self._fields and (subset is None or name in subset): raise ValueError("new_fields contains an unknown field '%s'." % name) if subset is not None: for name in subset: if name not in self._fields: raise ValueError("name '%s' in subset is not a known field in self._fields." % name) if name not in new_fields: raise ValueError("name '%s' in subset is not a known field in new_fields." % name) if subset is None: if len(new_fields) != len(self._fields): raise ValueError("new_fields contains too many fields.") for name, attr in self._fields.items(): if name in subset: attr.set(new_fields[name])
Assign the registered fields based on a dictionary Argument: | ``new_fields`` -- the dictionary with the data to be assigned to the attributes Optional argument: | ``subset`` -- a list of names to restrict the fields that are effectively overwritten
entailment
def dump(self, filename): """Dump the registered fields to a file Argument: | ``filename`` -- the file to write to """ with open(filename, "w") as f: for name in sorted(self._fields): self._fields[name].dump(f, name)
Dump the registered fields to a file Argument: | ``filename`` -- the file to write to
entailment
def load(self, filename, subset=None): """Load data into the registered fields Argument: | ``filename`` -- the filename to read from Optional argument: | ``subset`` -- a list of field names that are read from the file. If not given, all data is read from the file. """ with open(filename, "r") as f: name = None num_names = 0 while True: # read a header line line = f.readline() if len(line) == 0: break # process the header line words = line.split() name = words[0] attr = self._fields.get(name) if attr is None: raise FileFormatError("Wrong header: unknown field %s" % name) if not words[1].startswith("kind="): raise FileFormatError("Malformatted array header line. (kind)") kind = words[1][5:] expected_kind = attr.get_kind(attr.get()) if kind != expected_kind: raise FileFormatError("Wrong header: kind of field %s does not match. Got %s, expected %s" % (name, kind, expected_kind)) skip = ((subset is not None) and (name not in subset)) print(words) if (words[2].startswith("shape=(") and words[2].endswith(")")): if not isinstance(attr, ArrayAttr): raise FileFormatError("field '%s' is not an array." % name) shape = words[2][7:-1] if shape[-1] == ', ': shape = shape[:-1] try: shape = tuple(int(word) for word in shape.split(",")) except ValueError: raise FileFormatError("Malformatted array header. (shape)") expected_shape = attr.get().shape if shape != expected_shape: raise FileFormatError("Wrong header: shape of field %s does not match. Got %s, expected %s" % (name, shape, expected_shape)) attr.load(f, skip) elif words[2].startswith("value="): if not isinstance(attr, ScalarAttr): raise FileFormatError("field '%s' is not a single value." % name) if not skip: if kind == 'i': attr.set(int(words[2][6:])) else: attr.set(float(words[2][6:])) else: raise FileFormatError("Malformatted array header line. (shape/value)") num_names += 1 if num_names != len(self._fields) and subset is None: raise FileFormatError("Some fields are missing in the file.")
Load data into the registered fields Argument: | ``filename`` -- the filename to read from Optional argument: | ``subset`` -- a list of field names that are read from the file. If not given, all data is read from the file.
entailment
def _load_bond_data(self): """Load the bond data from the given file It's assumed that the uncommented lines in the data file have the following format: symbol1 symbol2 number1 number2 bond_length_single_a bond_length_double_a bond_length_triple_a bond_length_single_b bond_length_double_b bond_length_triple_b ..." where a, b, ... stand for different sources. """ def read_units(unit_names): """convert unit_names into conversion factors""" tmp = { "A": units.angstrom, "pm": units.picometer, "nm": units.nanometer, } return [tmp[unit_name] for unit_name in unit_names] def read_length(BOND_TYPE, words, col): """Read the bondlengths from a single line in the data file""" nlow = int(words[2]) nhigh = int(words[3]) for i, conversion in zip(range((len(words) - 4) // 3), conversions): word = words[col + 3 + i*3] if word != 'NA': self.lengths[BOND_TYPE][frozenset([nlow, nhigh])] = float(word)*conversion return with pkg_resources.resource_stream(__name__, 'data/bonds.csv') as f: for line in f: words = line.decode('utf-8').split() if (len(words) > 0) and (words[0][0] != "#"): if words[0] == "unit": conversions = read_units(words[1:]) else: read_length(BOND_SINGLE, words, 1) read_length(BOND_DOUBLE, words, 2) read_length(BOND_TRIPLE, words, 3)
Load the bond data from the given file It's assumed that the uncommented lines in the data file have the following format: symbol1 symbol2 number1 number2 bond_length_single_a bond_length_double_a bond_length_triple_a bond_length_single_b bond_length_double_b bond_length_triple_b ..." where a, b, ... stand for different sources.
entailment
def _approximate_unkown_bond_lengths(self): """Completes the bond length database with approximations based on VDW radii""" dataset = self.lengths[BOND_SINGLE] for n1 in periodic.iter_numbers(): for n2 in periodic.iter_numbers(): if n1 <= n2: pair = frozenset([n1, n2]) atom1 = periodic[n1] atom2 = periodic[n2] #if (pair not in dataset) and hasattr(atom1, "covalent_radius") and hasattr(atom2, "covalent_radius"): if (pair not in dataset) and (atom1.covalent_radius is not None) and (atom2.covalent_radius is not None): dataset[pair] = (atom1.covalent_radius + atom2.covalent_radius)
Completes the bond length database with approximations based on VDW radii
entailment
def bonded(self, n1, n2, distance): """Return the estimated bond type Arguments: | ``n1`` -- the atom number of the first atom in the bond | ``n2`` -- the atom number of the second atom the bond | ``distance`` -- the distance between the two atoms This method checks whether for the given pair of atom numbers, the given distance corresponds to a certain bond length. The best matching bond type will be returned. If the distance is a factor ``self.bond_tolerance`` larger than a tabulated distance, the algorithm will not relate them. """ if distance > self.max_length * self.bond_tolerance: return None deviation = 0.0 pair = frozenset([n1, n2]) result = None for bond_type in bond_types: bond_length = self.lengths[bond_type].get(pair) if (bond_length is not None) and \ (distance < bond_length * self.bond_tolerance): if result is None: result = bond_type deviation = abs(bond_length - distance) else: new_deviation = abs(bond_length - distance) if deviation > new_deviation: result = bond_type deviation = new_deviation return result
Return the estimated bond type Arguments: | ``n1`` -- the atom number of the first atom in the bond | ``n2`` -- the atom number of the second atom in the bond | ``distance`` -- the distance between the two atoms This method checks whether for the given pair of atom numbers, the given distance corresponds to a certain bond length. The best matching bond type will be returned. If the distance is a factor ``self.bond_tolerance`` larger than a tabulated distance, the algorithm will not relate them.
entailment
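A minimal usage sketch, assuming the module exposes a ready-made ``bonds`` database instance and the ``BOND_SINGLE`` constant (as molmod.bonds does), with distances in atomic units:

from molmod.bonds import bonds, BOND_SINGLE
from molmod.units import angstrom

# A C-C contact of 1.54 Angstrom is closest to the tabulated single bond length.
print(bonds.bonded(6, 6, 1.54*angstrom) == BOND_SINGLE)  # True
# A contact far beyond any tabulated C-C length is not recognized as a bond.
print(bonds.bonded(6, 6, 5.0*angstrom))                  # None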
def get_length(self, n1, n2, bond_type=BOND_SINGLE): """Return the length of a bond between n1 and n2 of type bond_type Arguments: | ``n1`` -- the atom number of the first atom in the bond | ``n2`` -- the atom number of the second atom in the bond Optional argument: | ``bond_type`` -- the type of bond [default=BOND_SINGLE] This is a safe method for querying a bond length. If no answer can be found, it returns None. """ dataset = self.lengths.get(bond_type) if dataset is None: return None return dataset.get(frozenset([n1, n2]))
Return the length of a bond between n1 and n2 of type bond_type Arguments: | ``n1`` -- the atom number of the first atom in the bond | ``n2`` -- the atom number of the second atom in the bond Optional argument: | ``bond_type`` -- the type of bond [default=BOND_SINGLE] This is a safe method for querying a bond length. If no answer can be found, it returns None.
entailment
def from_parameters3(cls, lengths, angles): """Construct a 3D unit cell with the given parameters The a vector is always parallel with the x-axis and they point in the same direction. The b vector is always in the xy plane and points towards the positive y-direction. The c vector points towards the positive z-direction. """ for length in lengths: if length <= 0: raise ValueError("The length parameters must be strictly positive.") for angle in angles: if angle <= 0 or angle >= np.pi: raise ValueError("The angle parameters must lie in the range ]0 deg, 180 deg[.") del length del angle matrix = np.zeros((3, 3), float) # first cell vector along x-axis matrix[0, 0] = lengths[0] # second cell vector in x-y plane matrix[0, 1] = np.cos(angles[2])*lengths[1] matrix[1, 1] = np.sin(angles[2])*lengths[1] # Finding the third cell vector is slightly more difficult. :-) # It works like this: # The dot products of a with c, b with c and c with c are known. the # vector a has only an x component, b has no z component. This results # in the following equations: u_a = lengths[0]*lengths[2]*np.cos(angles[1]) u_b = lengths[1]*lengths[2]*np.cos(angles[0]) matrix[0, 2] = u_a/matrix[0, 0] matrix[1, 2] = (u_b - matrix[0, 1]*matrix[0, 2])/matrix[1, 1] u_c = lengths[2]**2 - matrix[0, 2]**2 - matrix[1, 2]**2 if u_c < 0: raise ValueError("The given cell parameters do not correspond to a unit cell.") matrix[2, 2] = np.sqrt(u_c) active = np.ones(3, bool) return cls(matrix, active)
Construct a 3D unit cell with the given parameters The a vector is always parallel with the x-axis and they point in the same direction. The b vector is always in the xy plane and points towards the positive y-direction. The c vector points towards the positive z-direction.
entailment
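For example, a hexagonal cell (a = b, gamma = 120 degrees) can be built and verified by converting back to cell parameters; a sketch assuming molmod's ``UnitCell`` with lengths in atomic units and angles in radians:

import numpy as np
from molmod.unit_cells import UnitCell
from molmod.units import angstrom, deg

cell = UnitCell.from_parameters3(
    np.array([3.0, 3.0, 5.0])*angstrom,
    np.array([90.0, 90.0, 120.0])*deg,
)
lengths, angles = cell.parameters
print(lengths/angstrom)  # approximately [3.0, 3.0, 5.0]
print(angles/deg)        # approximately [90.0, 90.0, 120.0]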
def volume(self): """The volume of the unit cell The actual definition of the volume depends on the number of active directions: * num_active == 0 -- always -1 * num_active == 1 -- length of the cell vector * num_active == 2 -- surface of the parallelogram * num_active == 3 -- volume of the parallelepiped """ active = self.active_inactive[0] if len(active) == 0: return -1 elif len(active) == 1: return np.linalg.norm(self.matrix[:, active[0]]) elif len(active) == 2: return np.linalg.norm(np.cross(self.matrix[:, active[0]], self.matrix[:, active[1]])) elif len(active) == 3: return abs(np.linalg.det(self.matrix))
The volume of the unit cell The actual definition of the volume depends on the number of active directions: * num_active == 0 -- always -1 * num_active == 1 -- length of the cell vector * num_active == 2 -- surface of the parallelogram * num_active == 3 -- volume of the parallelepiped
entailment
def active_inactive(self): """The indexes of the active and the inactive cell vectors""" active_indices = [] inactive_indices = [] for index, active in enumerate(self.active): if active: active_indices.append(index) else: inactive_indices.append(index) return active_indices, inactive_indices
The indexes of the active and the inactive cell vectors
entailment
def reciprocal(self): """The reciprocal of the unit cell In case of a three-dimensional periodic system, this is trivially the transpose of the inverse of the cell matrix. This means that each column of the matrix corresponds to a reciprocal cell vector. In case of lower-dimensional periodicity, the inactive columns are zero, and the active columns span the same sub space as the original cell vectors. """ U, S, Vt = np.linalg.svd(self.matrix*self.active) Sinv = np.zeros(S.shape, float) for i in range(3): if abs(S[i]) < self.eps: Sinv[i] = 0.0 else: Sinv[i] = 1.0/S[i] return np.dot(U*Sinv, Vt)*self.active
The reciprocal of the unit cell In case of a three-dimensional periodic system, this is trivially the transpose of the inverse of the cell matrix. This means that each column of the matrix corresponds to a reciprocal cell vector. In case of lower-dimensional periodicity, the inactive columns are zero, and the active columns span the same sub space as the original cell vectors.
entailment
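The defining property is biorthogonality: for a fully periodic cell the reciprocal is the transpose of the inverse, so matrix^T dot reciprocal equals the identity. A quick numerical check, under the same ``UnitCell`` assumption as above:

import numpy as np
from molmod.unit_cells import UnitCell

matrix = np.array([[2.0, 0.5, 0.0],
                   [0.0, 3.0, 0.3],
                   [0.0, 0.0, 4.0]])
cell = UnitCell(matrix, np.array([True, True, True]))
print(np.allclose(np.dot(cell.matrix.T, cell.reciprocal), np.identity(3)))  # True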
def parameters(self): """The cell parameters (lengths and angles)""" length_a = np.linalg.norm(self.matrix[:, 0]) length_b = np.linalg.norm(self.matrix[:, 1]) length_c = np.linalg.norm(self.matrix[:, 2]) alpha = np.arccos(np.dot(self.matrix[:, 1], self.matrix[:, 2]) / (length_b * length_c)) beta = np.arccos(np.dot(self.matrix[:, 2], self.matrix[:, 0]) / (length_c * length_a)) gamma = np.arccos(np.dot(self.matrix[:, 0], self.matrix[:, 1]) / (length_a * length_b)) return ( np.array([length_a, length_b, length_c], float), np.array([alpha, beta, gamma], float) )
The cell parameters (lengths and angles)
entailment
def ordered(self): """An equivalent unit cell with the active cell vectors coming first""" active, inactive = self.active_inactive order = active + inactive return UnitCell(self.matrix[:,order], self.active[order])
An equivalent unit cell with the active cell vectors coming first
entailment
def alignment_a(self): """Computes the rotation matrix that aligns the unit cell with the Cartesian axes, starting with cell vector a. * a parallel to x * b in xy-plane with b_y positive * c with c_z positive """ from molmod.transformations import Rotation new_x = self.matrix[:, 0].copy() new_x /= np.linalg.norm(new_x) new_z = np.cross(new_x, self.matrix[:, 1]) new_z /= np.linalg.norm(new_z) new_y = np.cross(new_z, new_x) new_y /= np.linalg.norm(new_y) return Rotation(np.array([new_x, new_y, new_z]))
Computes the rotation matrix that aligns the unit cell with the Cartesian axes, starting with cell vector a. * a parallel to x * b in xy-plane with b_y positive * c with c_z positive
entailment
def spacings(self): """Computes the distances between neighboring crystal planes""" result_invsq = (self.reciprocal**2).sum(axis=0) result = np.zeros(3, float) for i in range(3): if result_invsq[i] > 0: result[i] = result_invsq[i]**(-0.5) return result
Computes the distances between neighboring crystal planes
entailment
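Since each spacing is the inverse length of the corresponding reciprocal vector, an orthorhombic cell simply returns its edge lengths; a small check under the same assumptions:

import numpy as np
from molmod.unit_cells import UnitCell

cell = UnitCell(np.diag([2.0, 3.0, 4.0]), np.array([True, True, True]))
print(cell.spacings)  # [2. 3. 4.]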
def add_cell_vector(self, vector): """Returns a new unit cell with an additional cell vector""" act = self.active_inactive[0] if len(act) == 3: raise ValueError("The unit cell already has three active cell vectors.") matrix = np.zeros((3, 3), float) active = np.zeros(3, bool) if len(act) == 0: # Add the new vector matrix[:, 0] = vector active[0] = True return UnitCell(matrix, active) a = self.matrix[:, act[0]] matrix[:, 0] = a active[0] = True if len(act) == 1: # Add the new vector matrix[:, 1] = vector active[1] = True return UnitCell(matrix, active) b = self.matrix[:, act[1]] matrix[:, 1] = b active[1] = True if len(act) == 2: # Add the new vector matrix[:, 2] = vector active[2] = True return UnitCell(matrix, active)
Returns a new unit cell with an additional cell vector
entailment
def get_radius_ranges(self, radius, mic=False): """Return ranges of indexes of the interacting neighboring unit cells Interacting neighboring unit cells have at least one point in their box volume that has a distance smaller than or equal to radius to at least one point in the central cell. This concept is of importance when computing pairwise long-range interactions in periodic systems. The mic option (short for minimum image convention) can be used to change the behavior of this routine such that only neighboring cells are considered that have at least one point within a distance below `radius` from the center of the reference cell. """ result = np.zeros(3, int) for i in range(3): if self.spacings[i] > 0: if mic: result[i] = np.ceil(radius/self.spacings[i]-0.5) else: result[i] = np.ceil(radius/self.spacings[i]) return result
Return ranges of indexes of the interacting neighboring unit cells Interacting neighboring unit cells have at least one point in their box volume that has a distance smaller than or equal to radius to at least one point in the central cell. This concept is of importance when computing pairwise long-range interactions in periodic systems. The mic option (short for minimum image convention) can be used to change the behavior of this routine such that only neighboring cells are considered that have at least one point within a distance below `radius` from the center of the reference cell.
entailment
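The arithmetic per direction is ceil(radius/spacing), or ceil(radius/spacing - 0.5) under the minimum image convention, so a cubic cell of edge 10 with a cutoff of 15 gives ranges of 2 and 1 respectively:

import numpy as np
from molmod.unit_cells import UnitCell

cell = UnitCell(np.identity(3)*10.0, np.array([True, True, True]))
print(cell.get_radius_ranges(15.0))            # [2 2 2], since ceil(15/10) == 2
print(cell.get_radius_ranges(15.0, mic=True))  # [1 1 1], since ceil(15/10 - 0.5) == 1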
def get_radius_indexes(self, radius, max_ranges=None): """Return the indexes of the interacting neighboring unit cells Interacting neighboring unit cells have at least one point in their box volume that has a distance smaller than or equal to radius to at least one point in the central cell. This concept is of importance when computing pairwise long-range interactions in periodic systems. Argument: | ``radius`` -- the radius of the interaction sphere Optional argument: | ``max_ranges`` -- numpy array with three elements: The maximum ranges of indexes to consider. This is practical when working with the minimum image convention to reduce the generated bins to the minimum image. (see binning.py) Use -1 to avoid such limitations. The default is three times -1. """ if max_ranges is None: max_ranges = np.array([-1, -1, -1]) ranges = self.get_radius_ranges(radius)*2+1 mask = (max_ranges != -1) & (max_ranges < ranges) ranges[mask] = max_ranges[mask] max_size = np.prod(self.get_radius_ranges(radius)*2 + 1) indexes = np.zeros((max_size, 3), int) from molmod.ext import unit_cell_get_radius_indexes reciprocal = self.reciprocal*self.active matrix = self.matrix*self.active size = unit_cell_get_radius_indexes( matrix, reciprocal, radius, max_ranges, indexes ) return indexes[:size]
Return the indexes of the interacting neighboring unit cells Interacting neighboring unit cells have at least one point in their box volume that has a distance smaller than or equal to radius to at least one point in the central cell. This concept is of importance when computing pairwise long-range interactions in periodic systems. Argument: | ``radius`` -- the radius of the interaction sphere Optional argument: | ``max_ranges`` -- numpy array with three elements: The maximum ranges of indexes to consider. This is practical when working with the minimum image convention to reduce the generated bins to the minimum image. (see binning.py) Use -1 to avoid such limitations. The default is three times -1.
entailment
def guess_geometry(graph, unit_cell=None, verbose=False): """Construct a molecular geometry based on a molecular graph. This routine does not require initial coordinates and will give a very rough picture of the initial geometry. Do not expect all details to be in perfect condition. A subsequent optimization with a more accurate level of theory is at least advisable. Argument: | ``graph`` -- The molecular graph of the system, see :class:molmod.molecular_graphs.MolecularGraph Optional argument: | ``unit_cell`` -- periodic boundry conditions, see :class:`molmod.unit_cells.UnitCell` | ``verbose`` -- Show optimizer progress when True """ N = len(graph.numbers) from molmod.minimizer import Minimizer, ConjugateGradient, \ NewtonLineSearch, ConvergenceCondition, StopLossCondition search_direction = ConjugateGradient() line_search = NewtonLineSearch() convergence = ConvergenceCondition(grad_rms=1e-6, step_rms=1e-6) stop_loss = StopLossCondition(max_iter=500, fun_margin=0.1) ff = ToyFF(graph, unit_cell) x_init = np.random.normal(0, 1, N*3) # level 1 geometry optimization: graph based ff.dm_quad = 1.0 minimizer = Minimizer(x_init, ff, search_direction, line_search, convergence, stop_loss, anagrad=True, verbose=verbose) x_init = minimizer.x # level 2 geometry optimization: graph based + pauli repulsion ff.dm_quad = 1.0 ff.dm_reci = 1.0 minimizer = Minimizer(x_init, ff, search_direction, line_search, convergence, stop_loss, anagrad=True, verbose=verbose) x_init = minimizer.x # Add a little noise to avoid saddle points x_init += np.random.uniform(-0.01, 0.01, len(x_init)) # level 3 geometry optimization: bond lengths + pauli ff.dm_quad = 0.0 ff.dm_reci = 0.2 ff.bond_quad = 1.0 minimizer = Minimizer(x_init, ff, search_direction, line_search, convergence, stop_loss, anagrad=True, verbose=verbose) x_init = minimizer.x # level 4 geometry optimization: bond lengths + bending angles + pauli ff.bond_quad = 0.0 ff.bond_hyper = 1.0 ff.span_quad = 1.0 minimizer = Minimizer(x_init, ff, search_direction, line_search, convergence, stop_loss, anagrad=True, verbose=verbose) x_init = minimizer.x x_opt = x_init mol = Molecule(graph.numbers, x_opt.reshape((N, 3))) return mol
Construct a molecular geometry based on a molecular graph. This routine does not require initial coordinates and will give a very rough picture of the initial geometry. Do not expect all details to be in perfect condition. A subsequent optimization with a more accurate level of theory is at least advisable. Argument: | ``graph`` -- The molecular graph of the system, see :class:`molmod.molecular_graphs.MolecularGraph` Optional argument: | ``unit_cell`` -- periodic boundary conditions, see :class:`molmod.unit_cells.UnitCell` | ``verbose`` -- Show optimizer progress when True
entailment
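A usage sketch for a water molecule specified only by its connectivity; this assumes the function lives in molmod.toyff and that ``MolecularGraph`` accepts an edge list, the atom numbers and the bond orders, as it does upstream:

import numpy as np
from molmod.molecular_graphs import MolecularGraph
from molmod.toyff import guess_geometry

# O(0)-H(1) and O(0)-H(2) bonds; no coordinates are needed.
graph = MolecularGraph([(0, 1), (0, 2)], np.array([8, 1, 1]), np.array([1, 1]))
mol = guess_geometry(graph)
print(mol.coordinates.shape)  # (3, 3): rough but chemically reasonable positions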
def tune_geometry(graph, mol, unit_cell=None, verbose=False): """Fine tune a molecular geometry, starting from a (very) poor guess of the initial geometry. Do not expect all details to be in perfect condition. A subsequent optimization with a more accurate level of theory is at least advisable. Arguments: | ``graph`` -- The molecular graph of the system, see :class:molmod.molecular_graphs.MolecularGraph | ``mol`` -- A :class:molmod.molecules.Molecule class with the initial guess of the coordinates Optional argument: | ``unit_cell`` -- periodic boundry conditions, see :class:`molmod.unit_cells.UnitCell` | ``verbose`` -- Show optimizer progress when True """ N = len(graph.numbers) from molmod.minimizer import Minimizer, ConjugateGradient, \ NewtonLineSearch, ConvergenceCondition, StopLossCondition search_direction = ConjugateGradient() line_search = NewtonLineSearch() convergence = ConvergenceCondition(grad_rms=1e-6, step_rms=1e-6) stop_loss = StopLossCondition(max_iter=500, fun_margin=1.0) ff = ToyFF(graph, unit_cell) x_init = mol.coordinates.ravel() # level 3 geometry optimization: bond lengths + pauli ff.dm_reci = 0.2 ff.bond_quad = 1.0 minimizer = Minimizer(x_init, ff, search_direction, line_search, convergence, stop_loss, anagrad=True, verbose=verbose) x_init = minimizer.x # level 4 geometry optimization: bond lengths + bending angles + pauli ff.bond_quad = 0.0 ff.bond_hyper = 1.0 ff.span_quad = 1.0 minimizer = Minimizer(x_init, ff, search_direction, line_search, convergence, stop_loss, anagrad=True, verbose=verbose) x_init = minimizer.x x_opt = x_init mol = Molecule(graph.numbers, x_opt.reshape((N, 3))) return mol
Fine tune a molecular geometry, starting from a (very) poor guess of the initial geometry. Do not expect all details to be in perfect condition. A subsequent optimization with a more accurate level of theory is at least advisable. Arguments: | ``graph`` -- The molecular graph of the system, see :class:`molmod.molecular_graphs.MolecularGraph` | ``mol`` -- A :class:`molmod.molecules.Molecule` instance with the initial guess of the coordinates Optional argument: | ``unit_cell`` -- periodic boundary conditions, see :class:`molmod.unit_cells.UnitCell` | ``verbose`` -- Show optimizer progress when True
entailment
def update_coordinates(self, coordinates=None): """Update the coordinates (and derived quantities) Argument: coordinates -- new Cartesian coordinates of the system """ if coordinates is not None: self.coordinates = coordinates self.numc = len(self.coordinates) self.distances = np.zeros((self.numc, self.numc), float) self.deltas = np.zeros((self.numc, self.numc, 3), float) self.directions = np.zeros((self.numc, self.numc, 3), float) self.dirouters = np.zeros((self.numc, self.numc, 3, 3), float) for index1, coordinate1 in enumerate(self.coordinates): for index2, coordinate2 in enumerate(self.coordinates): delta = coordinate1 - coordinate2 self.deltas[index1, index2] = delta distance = np.linalg.norm(delta) self.distances[index1, index2] = distance if index1 != index2: tmp = delta/distance self.directions[index1, index2] = tmp self.dirouters[index1, index2] = np.outer(tmp, tmp)
Update the coordinates (and derived quantities) Argument: coordinates -- new Cartesian coordinates of the system
entailment
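The double loop fills four pairwise arrays (deltas, distances, unit directions and their outer products). For small systems the same quantities can be cross-checked with a vectorized numpy construction (a standalone sketch, not part of the class):

import numpy as np

coordinates = np.random.normal(0, 1, (5, 3))
deltas = coordinates[:, None, :] - coordinates[None, :, :]
distances = np.linalg.norm(deltas, axis=-1)
safe = distances + np.identity(len(coordinates))   # avoid division by zero on the diagonal
directions = deltas/safe[:, :, None]
dirouters = directions[:, :, :, None]*directions[:, :, None, :]
print(distances.shape, directions.shape, dirouters.shape)  # (5, 5) (5, 5, 3) (5, 5, 3, 3)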
def energy(self): """Compute the energy of the system""" result = 0.0 for index1 in range(self.numc): for index2 in range(index1): if self.scaling[index1, index2] > 0: for se, ve in self.yield_pair_energies(index1, index2): result += se*ve*self.scaling[index1, index2] return result
Compute the energy of the system
entailment
def gradient_component(self, index1): """Compute the gradient of the energy for one atom""" result = np.zeros(3, float) for index2 in range(self.numc): if self.scaling[index1, index2] > 0: for (se, ve), (sg, vg) in zip(self.yield_pair_energies(index1, index2), self.yield_pair_gradients(index1, index2)): result += (sg*self.directions[index1, index2]*ve + se*vg)*self.scaling[index1, index2] return result
Compute the gradient of the energy for one atom
entailment
def gradient(self): """Compute the gradient of the energy for all atoms""" result = np.zeros((self.numc, 3), float) for index1 in range(self.numc): result[index1] = self.gradient_component(index1) return result
Compute the gradient of the energy for all atoms
entailment
def hessian_component(self, index1, index2): """Compute the hessian of the energy for one atom pair""" result = np.zeros((3, 3), float) if index1 == index2: for index3 in range(self.numc): if self.scaling[index1, index3] > 0: d_1 = 1/self.distances[index1, index3] for (se, ve), (sg, vg), (sh, vh) in zip( self.yield_pair_energies(index1, index3), self.yield_pair_gradients(index1, index3), self.yield_pair_hessians(index1, index3) ): result += ( +sh*self.dirouters[index1, index3]*ve +sg*(np.identity(3, float) - self.dirouters[index1, index3])*ve*d_1 +sg*np.outer(self.directions[index1, index3], vg) +sg*np.outer(vg, self.directions[index1, index3]) +se*vh )*self.scaling[index1, index3] elif self.scaling[index1, index2] > 0: d_1 = 1/self.distances[index1, index2] for (se, ve), (sg, vg), (sh, vh) in zip( self.yield_pair_energies(index1, index2), self.yield_pair_gradients(index1, index2), self.yield_pair_hessians(index1, index2) ): result -= ( +sh*self.dirouters[index1, index2]*ve +sg*(np.identity(3, float) - self.dirouters[index1, index2])*ve*d_1 +sg*np.outer(self.directions[index1, index2], vg) +sg*np.outer(vg, self.directions[index1, index2]) +se*vh )*self.scaling[index1, index2] return result
Compute the hessian of the energy for one atom pair
entailment
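Each pair term is a radial prefactor s(r_ij) times a remainder v, so the block accumulated in the diagonal branch is the chain-rule Hessian of that product, with r = |x_i - x_j|, e = (x_i - x_j)/r and I the 3x3 identity (the off-diagonal block is the same expression with opposite sign). Written out in LaTeX, the five summands in the code are:

\nabla_i \nabla_i \bigl(s(r)\,v\bigr) =
  s''(r)\, e e^{T}\, v
+ \frac{s'(r)}{r}\,\bigl(I - e e^{T}\bigr)\, v
+ s'(r)\, e\, (\nabla_i v)^{T}
+ s'(r)\, (\nabla_i v)\, e^{T}
+ s(r)\, \nabla_i \nabla_i v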
def hessian(self): """Compute the hessian of the energy""" result = np.zeros((self.numc, 3, self.numc, 3), float) for index1 in range(self.numc): for index2 in range(self.numc): result[index1, :, index2, :] = self.hessian_component(index1, index2) return result
Compute the hessian of the energy
entailment
def yield_pair_energies(self, index1, index2): """Yields pairs ((s(r_ij), v(bar{r}_ij))""" d_1 = 1/self.distances[index1, index2] if self.charges is not None: c1 = self.charges[index1] c2 = self.charges[index2] yield c1*c2*d_1, 1 if self.dipoles is not None: d_3 = d_1**3 d_5 = d_1**5 delta = self.deltas[index1, index2] p1 = self.dipoles[index1] p2 = self.dipoles[index2] yield d_3*np.dot(p1, p2), 1 yield -3*d_5, np.dot(p1, delta)*np.dot(delta, p2) if self.charges is not None: yield c1*d_3, np.dot(p2, delta) yield c2*d_3, np.dot(p1, -delta)
Yields pairs ((s(r_ij), v(bar{r}_ij))
entailment
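Multiplying each yielded (prefactor, remainder) pair and summing gives the charge-charge, dipole-dipole and charge-dipole interaction of the multipole expansion, with r_ij = x_i - x_j as in ``deltas`` (in LaTeX):

V_{ij} = \frac{q_i q_j}{r}
       + \frac{\mathbf{p}_i \cdot \mathbf{p}_j}{r^{3}}
       - \frac{3\,(\mathbf{p}_i \cdot \mathbf{r}_{ij})(\mathbf{r}_{ij} \cdot \mathbf{p}_j)}{r^{5}}
       + \frac{q_i\,(\mathbf{p}_j \cdot \mathbf{r}_{ij})}{r^{3}}
       - \frac{q_j\,(\mathbf{p}_i \cdot \mathbf{r}_{ij})}{r^{3}}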
def yield_pair_gradients(self, index1, index2): """Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))""" d_2 = 1/self.distances[index1, index2]**2 if self.charges is not None: c1 = self.charges[index1] c2 = self.charges[index2] yield -c1*c2*d_2, np.zeros(3) if self.dipoles is not None: d_4 = d_2**2 d_6 = d_2**3 delta = self.deltas[index1, index2] p1 = self.dipoles[index1] p2 = self.dipoles[index2] yield -3*d_4*np.dot(p1, p2), np.zeros(3) yield 15*d_6, p1*np.dot(p2, delta) + p2*np.dot(p1, delta) if self.charges is not None: yield -3*c1*d_4, p2 yield -3*c2*d_4, -p1
Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))
entailment
def yield_pair_hessians(self, index1, index2): """Yields pairs ((s''(r_ij), grad_i (x) grad_i v(bar{r}_ij))""" d_1 = 1/self.distances[index1, index2] d_3 = d_1**3 if self.charges is not None: c1 = self.charges[index1] c2 = self.charges[index2] yield 2*c1*c2*d_3, np.zeros((3, 3)) if self.dipoles is not None: d_5 = d_1**5 d_7 = d_1**7 p1 = self.dipoles[index1] p2 = self.dipoles[index2] yield 12*d_5*np.dot(p1, p2), np.zeros((3, 3)) yield -90*d_7, np.outer(p1, p2) + np.outer(p2, p1) if self.charges is not None: yield 12*c1*d_5, np.zeros((3, 3)) yield 12*c2*d_5, np.zeros((3, 3))
Yields pairs ((s''(r_ij), grad_i (x) grad_i v(bar{r}_ij))
entailment
def esp(self): """Compute the electrostatic potential at each atom due to other atoms""" result = np.zeros(self.numc, float) for index1 in range(self.numc): result[index1] = self.esp_component(index1) return result
Compute the electrostatic potential at each atom due to other atoms
entailment
def efield(self): """Compute the electric field at each atom due to other atoms""" result = np.zeros((self.numc, 3), float) for index1 in range(self.numc): result[index1] = self.efield_component(index1) return result
Compute the electric field at each atom due to other atoms
entailment
def yield_pair_energies(self, index1, index2): """Yields pairs ((s(r_ij), v(bar{r}_ij))""" strength = self.strengths[index1, index2] distance = self.distances[index1, index2] yield strength*distance**(-6), 1
Yields pairs ((s(r_ij), v(bar{r}_ij))
entailment
def yield_pair_gradients(self, index1, index2): """Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))""" strength = self.strengths[index1, index2] distance = self.distances[index1, index2] yield -6*strength*distance**(-7), np.zeros(3)
Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))
entailment
def yield_pair_energies(self, index1, index2): """Yields pairs ((s(r_ij), v(bar{r}_ij))""" A = self.As[index1, index2] B = self.Bs[index1, index2] distance = self.distances[index1, index2] yield A*np.exp(-B*distance), 1
Yields pairs ((s(r_ij), v(bar{r}_ij))
entailment
def yield_pair_gradients(self, index1, index2): """Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))""" A = self.As[index1, index2] B = self.Bs[index1, index2] distance = self.distances[index1, index2] yield -B*A*np.exp(-B*distance), np.zeros(3)
Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))
entailment
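The convention throughout these generators is that yield_pair_gradients must return the radial derivative s'(r) matching the s(r) from yield_pair_energies. For the exponential repulsion this is easy to verify with a central finite difference (a standalone sketch of the convention, not using the class itself):

import numpy as np

A, B, r, h = 5.0, 2.0, 1.3, 1e-6
s = lambda x: A*np.exp(-B*x)        # the pair energy prefactor (here the remainder v is 1)
s_prime = -B*A*np.exp(-B*r)         # what yield_pair_gradients reports for this term
numerical = (s(r + h) - s(r - h))/(2*h)
print(np.allclose(s_prime, numerical))  # True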
def load_cml(cml_filename): """Load the molecules from a CML file Argument: | ``cml_filename`` -- The filename of a CML file. Returns a list of molecule objects with optional molecular graph attribute and extra attributes. """ parser = make_parser() parser.setFeature(feature_namespaces, 0) dh = CMLMoleculeLoader() parser.setContentHandler(dh) parser.parse(cml_filename) return dh.molecules
Load the molecules from a CML file Argument: | ``cml_filename`` -- The filename of a CML file. Returns a list of molecule objects with optional molecular graph attribute and extra attributes.
entailment
def _dump_cml_molecule(f, molecule): """Dump a single molecule to a CML file Arguments: | ``f`` -- a file-like object | ``molecule`` -- a Molecule instance """ extra = getattr(molecule, "extra", {}) attr_str = " ".join("%s='%s'" % (key, value) for key, value in extra.items()) f.write(" <molecule id='%s' %s>\n" % (molecule.title, attr_str)) f.write(" <atomArray>\n") atoms_extra = getattr(molecule, "atoms_extra", {}) for counter, number, coordinate in zip(range(molecule.size), molecule.numbers, molecule.coordinates/angstrom): atom_extra = atoms_extra.get(counter, {}) attr_str = " ".join("%s='%s'" % (key, value) for key, value in atom_extra.items()) f.write(" <atom id='a%i' elementType='%s' x3='%s' y3='%s' z3='%s' %s />\n" % ( counter, periodic[number].symbol, coordinate[0], coordinate[1], coordinate[2], attr_str, )) f.write(" </atomArray>\n") if molecule.graph is not None: bonds_extra = getattr(molecule, "bonds_extra", {}) f.write(" <bondArray>\n") for edge in molecule.graph.edges: bond_extra = bonds_extra.get(edge, {}) attr_str = " ".join("%s='%s'" % (key, value) for key, value in bond_extra.items()) i1, i2 = edge f.write(" <bond atomRefs2='a%i a%i' %s />\n" % (i1, i2, attr_str)) f.write(" </bondArray>\n") f.write(" </molecule>\n")
Dump a single molecule to a CML file Arguments: | ``f`` -- a file-like object | ``molecule`` -- a Molecule instance
entailment
def dump_cml(f, molecules): """Write a list of molecules to a CML file Arguments: | ``f`` -- a filename of a CML file or a file-like object | ``molecules`` -- a list of molecule objects. """ if isinstance(f, str): f = open(f, "w") close = True else: close = False f.write("<?xml version='1.0'?>\n") f.write("<list xmlns='http://www.xml-cml.org/schema'>\n") for molecule in molecules: _dump_cml_molecule(f, molecule) f.write("</list>\n") if close: f.close()
Write a list of molecules to a CML file Arguments: | ``f`` -- a filename of a CML file or a file-like object | ``molecules`` -- a list of molecule objects.
entailment
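A round-trip sketch, assuming the molmod.io.cml module layout and a ``Molecule`` constructor that takes atom numbers, coordinates (in atomic units) and an optional title; the graph attribute may be left unset, in which case no bondArray is written:

import numpy as np
from molmod.io.cml import dump_cml, load_cml
from molmod.molecules import Molecule

mol = Molecule(np.array([8, 1, 1]),
               np.array([[0.0, 0.0, 0.0],
                         [0.0, 0.0, 1.8],
                         [1.7, 0.0, -0.6]]),
               title="water")
dump_cml("water.cml", [mol])
reloaded = load_cml("water.cml")[0]
print(reloaded.numbers, reloaded.coordinates.shape)  # [8 1 1] (3, 3)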