idx int64 0 252k | question stringlengths 48 5.28k | target stringlengths 5 1.23k |
|---|---|---|
249,800 | def get_stocks ( self , symbols : List [ str ] ) -> List [ Commodity ] : query = ( self . query . filter ( Commodity . mnemonic . in_ ( symbols ) ) ) . order_by ( Commodity . namespace , Commodity . mnemonic ) return query . all ( ) | loads stocks by symbol |
249,801 | def get_aggregate ( self , security : Commodity ) -> SecurityAggregate : assert security is not None assert isinstance ( security , Commodity ) return SecurityAggregate ( self . book , security ) | Returns the aggregate for the entity |
249,802 | def get_aggregate_for_symbol ( self , symbol : str ) -> SecurityAggregate : security = self . get_by_symbol ( symbol ) if not security : raise ValueError ( f"Security not found in GC book: {symbol}!" ) return self . get_aggregate ( security ) | Returns the aggregate for the security found by full symbol |
249,803 | def query ( self ) : query = ( self . book . session . query ( Commodity ) . filter ( Commodity . namespace != "CURRENCY" , Commodity . namespace != "template" ) ) return query | Returns the base query which filters out data for all queries . |
249,804 | def book ( self ) -> Book : if not self . __book : book_uri = self . settings . database_path self . __book = Database ( book_uri ) . open_book ( for_writing = self . __for_writing ) return self . __book | GnuCash Book . Opens the book or creates a database based on settings . |
249,805 | def accounts ( self ) -> AccountsAggregate : if not self . __accounts_aggregate : self . __accounts_aggregate = AccountsAggregate ( self . book ) return self . __accounts_aggregate | Returns the Accounts aggregate |
249,806 | def currencies ( self ) -> CurrenciesAggregate : if not self . __currencies_aggregate : self . __currencies_aggregate = CurrenciesAggregate ( self . book ) return self . __currencies_aggregate | Returns the Currencies aggregate |
249,807 | def securities ( self ) : if not self . __securities_aggregate : self . __securities_aggregate = SecuritiesAggregate ( self . book ) return self . __securities_aggregate | Returns securities aggregate |
249,808 | def get_currency_symbols ( self ) -> List [ str ] : result = [ ] currencies = self . currencies . get_book_currencies ( ) for cur in currencies : result . append ( cur . mnemonic ) return result | Returns the used currencies symbols as an array |
249,809 | def load_jinja_template ( file_name ) : original_script_path = sys . argv [ 0 ] script_dir = os . path . dirname ( original_script_path ) from jinja2 import Environment , FileSystemLoader env = Environment ( loader = FileSystemLoader ( script_dir ) ) template = env . get_template ( file_name ) return template | Loads the jinja2 HTML template from the given file . Assumes that the file is in the same directory as the script . |
249,810 | def get_days_in_month ( year : int , month : int ) -> int : month_range = calendar . monthrange ( year , month ) return month_range [ 1 ] | Returns number of days in the given month . 1 - based numbers as arguments . i . e . November = 11 |
249,811 | def get_from_gnucash26_date ( date_str : str ) -> date : date_format = "%Y%m%d" result = datetime . strptime ( date_str , date_format ) . date ( ) return result | Creates a datetime from GnuCash 2 . 6 date string |
249,812 | def parse_period ( period : str ) : period = period . split ( " - " ) date_from = Datum ( ) if len ( period [ 0 ] ) == 10 : date_from . from_iso_date_string ( period [ 0 ] ) else : date_from . from_iso_long_date ( period [ 0 ] ) date_from . start_of_day ( ) date_to = Datum ( ) if len ( period [ 1 ] ) == 10 : date_to . from_iso_date_string ( period [ 1 ] ) else : date_to . from_iso_long_date ( period [ 1 ] ) date_to . end_of_day ( ) return date_from . value , date_to . value | parses period from date range picker . The received values are full ISO date |
249,813 | def get_period ( date_from : date , date_to : date ) -> str : assert isinstance ( date_from , date ) assert isinstance ( date_to , date ) str_from : str = date_from . isoformat ( ) str_to : str = date_to . isoformat ( ) return str_from + " - " + str_to | Returns the period string from the given dates |
249,814 | def load_json_file_contents ( path : str ) -> str : assert isinstance ( path , str ) content = None file_path = os . path . abspath ( path ) content = fileutils . read_text_from_file ( file_path ) json_object = json . loads ( content ) content = json . dumps ( json_object , sort_keys = True , indent = 4 ) return content | Loads contents from a json file |
249,815 | def validate_json ( data : str ) : result = None try : result = json . loads ( data ) except ValueError as error : log ( ERROR , "invalid json: %s" , error ) return result | Validate JSON by parsing string data . Returns the json dict . |
249,816 | def get_sql ( query ) : sql = str ( query . statement . compile ( dialect = sqlite . dialect ( ) , compile_kwargs = { "literal_binds" : True } ) ) return sql | Returns the sql query |
249,817 | def save_to_temp ( content , file_name = None ) : temp_dir = tempfile . gettempdir ( ) out_file = os . path . join ( temp_dir , file_name ) file = open ( out_file , 'w' ) file . write ( content ) file . close ( ) return out_file | Save the contents into a temp file . |
249,818 | def read_book_uri_from_console ( ) : db_path : str = input ( "Enter book_url or leave blank for the default settings value: " ) if db_path : if db_path . startswith ( "sqlite://" ) : db_path_uri = db_path else : db_path_uri = "file:///" + db_path else : cfg = settings . Settings ( ) db_path_uri = cfg . database_uri return db_path_uri | Prompts the user to enter book url in console |
249,819 | def run_report_from_console ( output_file_name , callback ) : print ( "The report uses a read-only access to the book." ) print ( "Now enter the data or ^Z to continue:" ) result = callback ( ) output = save_to_temp ( result , output_file_name ) webbrowser . open ( output ) | Runs the report from the command line . Receives the book url from the console . |
249,820 | def get_dividend_sum_for_symbol ( book : Book , symbol : str ) : svc = SecuritiesAggregate ( book ) security = svc . get_by_symbol ( symbol ) sec_svc = SecurityAggregate ( book , security ) accounts = sec_svc . get_income_accounts ( ) total = Decimal ( 0 ) for account in accounts : income = get_dividend_sum ( book , account ) total += income return total | Calculates all income for a symbol |
249,821 | def import_file ( filename ) : file_path = os . path . abspath ( filename ) log ( DEBUG , "Loading prices from %s" , file_path ) prices = __read_prices_from_file ( file_path ) with BookAggregate ( for_writing = True ) as svc : svc . prices . import_prices ( prices ) print ( "Saving book..." ) svc . book . save ( ) | Imports the commodity prices from the given . csv file . |
249,822 | def generate_report ( book_url ) : shares_no = None avg_price = None stock_template = templates . load_jinja_template ( "stock_template.html" ) stock_rows = "" with piecash . open_book ( book_url , readonly = True , open_if_lock = True ) as book : all_stocks = portfoliovalue . get_all_stocks ( book ) for stock in all_stocks : for_date = datetime . today ( ) . date model = portfoliovalue . get_stock_model_from ( book , stock , for_date ) stock_rows += stock_template . render ( model ) template = templates . load_jinja_template ( "template.html" ) result = template . render ( ** locals ( ) ) return result | Generates an HTML report content . |
249,823 | def main ( symbol : str ) : print ( "Displaying the balance for" , symbol ) with BookAggregate ( ) as svc : security = svc . book . get ( Commodity , mnemonic = symbol ) sec_svc = SecurityAggregate ( svc . book , security ) shares_no = sec_svc . get_quantity ( ) print ( "Quantity:" , shares_no ) avg_price = sec_svc . get_avg_price ( ) print ( "Average price:" , avg_price ) | Displays the balance for the security symbol . |
249,824 | def generate_report ( book_url ) : with piecash . open_book ( book_url , readonly = True , open_if_lock = True ) as book : accounts = [ acc . fullname for acc in book . accounts ] return f | Generates the report HTML . |
249,825 | def get_project_files ( ) : if is_git_project ( ) : return get_git_project_files ( ) project_files = [ ] for top , subdirs , files in os . walk ( '.' ) : for subdir in subdirs : if subdir . startswith ( '.' ) : subdirs . remove ( subdir ) for f in files : if f . startswith ( '.' ) : continue project_files . append ( os . path . join ( top , f ) ) return project_files | Retrieve a list of project files ignoring hidden files . |
249,826 | def print_success_message ( message ) : try : import colorama print ( colorama . Fore . GREEN + message + colorama . Fore . RESET ) except ImportError : print ( message ) | Print a message indicating success in green color to STDOUT . |
249,827 | def print_failure_message ( message ) : try : import colorama print ( colorama . Fore . RED + message + colorama . Fore . RESET , file = sys . stderr ) except ImportError : print ( message , file = sys . stderr ) | Print a message indicating failure in red color to STDERR . |
249,828 | def main ( ) : importer = ExchangeRatesImporter ( ) print ( "####################################" ) latest_rates_json = importer . get_latest_rates ( ) mapper = None rates = mapper . map_to_model ( latest_rates_json ) print ( "####################################" ) print ( "importing rates into gnucash..." ) with BookAggregate ( for_writing = False ) as svc : svc . currencies . import_fx_rates ( rates ) print ( "####################################" ) print ( "displaying rates from gnucash..." ) importer . display_gnucash_rates ( ) | Default entry point |
249,829 | def generate_asset_allocation_report ( book_url ) : model = load_asset_allocation_model ( book_url ) template = templates . load_jinja_template ( "report_asset_allocation.html" ) result = template . render ( model = model ) return result | The output is generated here . Separated from the generate_report function to allow executing from the command line . |
249,830 | def parse_value ( self , value_string : str ) : self . value = Decimal ( value_string ) return self . value | Parses the amount string . |
249,831 | def parse ( self , csv_row : str ) : self . date = self . parse_euro_date ( csv_row [ 2 ] ) self . symbol = csv_row [ 0 ] self . value = self . parse_value ( csv_row [ 1 ] ) return self | Parses the . csv row into own values |
249,832 | def load_cash_balances_with_children ( self , root_account_fullname : str ) : assert isinstance ( root_account_fullname , str ) svc = AccountsAggregate ( self . book ) root_account = svc . get_by_fullname ( root_account_fullname ) if not root_account : raise ValueError ( "Account not found" , root_account_fullname ) accounts = self . __get_all_child_accounts_as_array ( root_account ) model = { } for account in accounts : if account . commodity . namespace != "CURRENCY" or account . placeholder : continue currency_symbol = account . commodity . mnemonic if not currency_symbol in model : currency_record = { "name" : currency_symbol , "total" : 0 , "rows" : [ ] } model [ currency_symbol ] = currency_record else : currency_record = model [ currency_symbol ] balance = account . get_balance ( ) row = { "name" : account . name , "fullname" : account . fullname , "currency" : currency_symbol , "balance" : balance } currency_record [ "rows" ] . append ( row ) total = Decimal ( currency_record [ "total" ] ) total += balance currency_record [ "total" ] = total return model | loads data for cash balances |
249,833 | def get_balance ( self ) : on_date = Datum ( ) on_date . today ( ) return self . get_balance_on ( on_date . value ) | Current account balance |
249,834 | def get_splits_query ( self ) : query = ( self . book . session . query ( Split ) . filter ( Split . account == self . account ) ) return query | Returns all the splits in the account |
249,835 | def get_transactions ( self , date_from : datetime , date_to : datetime ) -> List [ Transaction ] : assert isinstance ( date_from , datetime ) assert isinstance ( date_to , datetime ) dt_from = Datum ( ) dt_from . from_datetime ( date_from ) dt_from . start_of_day ( ) dt_to = Datum ( ) dt_to . from_datetime ( date_to ) dt_to . end_of_day ( ) query = ( self . book . session . query ( Transaction ) . join ( Split ) . filter ( Split . account_guid == self . account . guid ) . filter ( Transaction . post_date >= dt_from . date , Transaction . post_date <= dt_to . date ) . order_by ( Transaction . post_date ) ) return query . all ( ) | Returns account transactions |
249,836 | def __get_all_child_accounts_as_array ( self , account : Account ) -> List [ Account ] : result = [ ] result . append ( account ) for child in account . children : sub_accounts = self . __get_all_child_accounts_as_array ( child ) result += sub_accounts return result | Returns the whole tree of child accounts in a list |
249,837 | def find_by_name ( self , term : str , include_placeholders : bool = False ) -> List [ Account ] : query = ( self . query . filter ( Account . name . like ( '%' + term + '%' ) ) . order_by ( Account . name ) ) if not include_placeholders : query = query . filter ( Account . placeholder == 0 ) return query . all ( ) | Search for account by part of the name |
249,838 | def get_aggregate_by_id ( self , account_id : str ) -> AccountAggregate : account = self . get_by_id ( account_id ) return self . get_account_aggregate ( account ) | Returns the aggregate for the given id |
249,839 | def get_by_fullname ( self , fullname : str ) -> Account : query = ( self . book . session . query ( Account ) ) all_accounts = query . all ( ) for account in all_accounts : if account . fullname == fullname : return account return None | Loads account by full name |
249,840 | def get_account_id_by_fullname ( self , fullname : str ) -> str : account = self . get_by_fullname ( fullname ) return account . guid | Locates the account by fullname |
249,841 | def get_all_children ( self , fullname : str ) -> List [ Account ] : root_acct = self . get_by_fullname ( fullname ) if not root_acct : raise NameError ( "Account not found in book!" ) acct_agg = self . get_account_aggregate ( root_acct ) result = acct_agg . get_all_child_accounts_as_array ( ) return result | Returns the whole child account tree for the account with the given full name |
249,842 | def get_all ( self ) -> List [ Account ] : return [ account for account in self . book . accounts if account . parent . name != "Template Root" ] | Returns all book accounts as a list excluding templates . |
249,843 | def get_favourite_accounts ( self ) -> List [ Account ] : from gnucash_portfolio . lib . settings import Settings settings = Settings ( ) favourite_accts = settings . favourite_accounts accounts = self . get_list ( favourite_accts ) return accounts | Provides a list of favourite accounts |
249,844 | def get_favourite_account_aggregates ( self ) -> List [ AccountAggregate ] : accounts = self . get_favourite_accounts ( ) aggregates = [ ] for account in accounts : aggregate = self . get_account_aggregate ( account ) aggregates . append ( aggregate ) return aggregates | Returns the list of aggregates for favourite accounts |
249,845 | def get_by_id ( self , acct_id ) -> Account : return self . book . get ( Account , guid = acct_id ) | Loads an account entity |
249,846 | def get_by_name ( self , name : str ) -> List [ Account ] : return self . get_by_name_from ( self . book . root , name ) | Searches accounts by name |
249,847 | def get_by_name_from ( self , root : Account , name : str ) -> List [ Account ] : result = [ ] if root . name == name : result . append ( root ) for child in root . children : child_results = self . get_by_name_from ( child , name ) result += child_results return result | Searches child accounts by name starting from the given account |
249,848 | def get_list ( self , ids : List [ str ] ) -> List [ Account ] : query = ( self . query . filter ( Account . guid . in_ ( ids ) ) ) return query . all ( ) | Loads accounts by the ids passed as an argument |
249,849 | def query ( self ) : query = ( self . book . session . query ( Account ) . join ( Commodity ) . filter ( Commodity . namespace != "template" ) . filter ( Account . type != AccountType . root . value ) ) return query | Main accounts query |
249,850 | def search ( self , name : str = None , acc_type : str = None ) : query = self . query if name is not None : query = query . filter ( Account . name == name ) if acc_type is not None : acc_type = acc_type . upper ( ) query = query . filter ( Account . type == acc_type ) return query . all ( ) | Search accounts by passing parameters . name = exact name name_part = part of name parent_id = id of the parent account type = account type |
249,851 | def get_price_as_of ( self , stock : Commodity , on_date : datetime ) : prices = PriceDbApplication ( ) prices . get_prices_on ( on_date . date ( ) . isoformat ( ) , stock . namespace , stock . mnemonic ) | Gets the latest price on or before the given date . |
249,852 | def import_price ( self , price : PriceModel ) : symbol = price . symbol if "." in symbol : symbol = price . symbol . split ( "." ) [ 0 ] stock = SecuritiesAggregate ( self . book ) . get_by_symbol ( symbol ) if stock is None : logging . warning ( "security %s not found in book." , price . symbol ) return False existing_prices = stock . prices . filter ( Price . date == price . datetime . date ( ) ) . all ( ) if not existing_prices : self . __create_price_for ( stock , price ) else : logging . warning ( "price already exists for %s on %s" , stock . mnemonic , price . datetime . strftime ( "%Y-%m-%d" ) ) existing_price = existing_prices [ 0 ] existing_price . value = price . value return True | Import individual price |
249,853 | def __create_price_for ( self , commodity : Commodity , price : PriceModel ) : logging . info ( "Adding a new price for %s, %s, %s" , commodity . mnemonic , price . datetime . strftime ( "%Y-%m-%d" ) , price . value ) sec_svc = SecurityAggregate ( self . book , commodity ) currency = sec_svc . get_currency ( ) if currency != price . currency : raise ValueError ( "Requested currency does not match the currency previously used" , currency , price . currency ) new_price = Price ( commodity , currency , price . datetime . date ( ) , price . value , source = "Finance::Quote" ) commodity . prices . append ( new_price ) | Creates a new Price entry in the book for the given commodity |
249,854 | def get_splits_query ( self ) : query = ( self . book . session . query ( Split ) . filter ( Split . transaction_guid == self . transaction . guid ) ) return query | Returns the query for related splits |
249,855 | def generate_report ( book_url , fund_ids : StringOption ( section = "Funds" , sort_tag = "c" , documentation_string = "Comma-separated list of fund ids." , default_value = "8123,8146,8148,8147" ) ) : return render_report ( book_url , fund_ids ) | Generates the report output |
249,856 | def searchAccount ( searchTerm , book ) : print ( "Search results:\n" ) found = False for account in book . accounts : if searchTerm . lower ( ) in account . fullname . lower ( ) : print ( account . fullname ) found = True if not found : print ( "Search term not found in account names." ) | Searches through account names |
249,857 | def display_db_info ( self ) : with self . open_book ( ) as book : default_currency = book . default_currency print ( "Default currency is " , default_currency . mnemonic ) | Displays some basic info about the GnuCash book |
249,858 | def open_book ( self , for_writing = False ) -> piecash . Book : filename = None file_url = urllib . parse . urlparse ( self . filename ) if file_url . scheme == "file" or file_url . scheme == "sqlite" : filename = file_url . path [ 1 : ] else : filename = self . filename if not os . path . isfile ( filename ) : log ( WARN , "Database %s requested but not found. Creating an in-memory book." , filename ) return self . create_book ( ) access_type = "read/write" if for_writing else "readonly" log ( INFO , "Using %s in %s mode." , filename , access_type ) file_path = path . abspath ( filename ) if not for_writing : book = piecash . open_book ( file_path , open_if_lock = True ) else : book = piecash . open_book ( file_path , open_if_lock = True , readonly = False ) return book | Opens the database . Call this using with . If database file is not found an in - memory database will be created . |
249,859 | def read_text_from_file ( path : str ) -> str : with open ( path ) as text_file : content = text_file . read ( ) return content | Reads text file contents |
249,860 | def save_text_to_file ( content : str , path : str ) : with open ( path , mode = 'w' ) as text_file : text_file . write ( content ) | Saves text to file |
249,861 | def get_amount_in_base_currency ( self , currency : str , amount : Decimal ) -> Decimal : assert isinstance ( amount , Decimal ) if currency == self . get_default_currency ( ) . mnemonic : return amount agg = self . get_currency_aggregate_by_symbol ( currency ) if not agg : raise ValueError ( f"Currency not found: {currency}!" ) rate_to_base = agg . get_latest_price ( ) if not rate_to_base : raise ValueError ( f"Latest price not found for {currency}!" ) assert isinstance ( rate_to_base . value , Decimal ) result = amount * rate_to_base . value return result | Calculates the amount in base currency |
249,862 | def get_default_currency ( self ) -> Commodity : result = None if self . default_currency : result = self . default_currency else : def_currency = self . __get_default_currency ( ) self . default_currency = def_currency result = def_currency return result | returns the book default currency |
249,863 | def get_book_currencies ( self ) -> List [ Commodity ] : query = ( self . currencies_query . order_by ( Commodity . mnemonic ) ) return query . all ( ) | Returns currencies used in the book |
249,864 | def get_currency_aggregate_by_symbol ( self , symbol : str ) -> CurrencyAggregate : currency = self . get_by_symbol ( symbol ) result = self . get_currency_aggregate ( currency ) return result | Creates currency aggregate for the given currency symbol |
249,865 | def get_by_symbol ( self , symbol : str ) -> Commodity : assert isinstance ( symbol , str ) query = ( self . currencies_query . filter ( Commodity . mnemonic == symbol ) ) return query . one ( ) | Loads currency by symbol |
249,866 | def import_fx_rates ( self , rates : List [ PriceModel ] ) : have_new_rates = False base_currency = self . get_default_currency ( ) for rate in rates : assert isinstance ( rate , PriceModel ) currency = self . get_by_symbol ( rate . symbol ) amount = rate . value has_rate = currency . prices . filter ( Price . date == rate . datetime . date ( ) ) . first ( ) if not has_rate : log ( INFO , "Creating entry for %s, %s, %s, %s" , base_currency . mnemonic , currency . mnemonic , rate . datetime . date ( ) , amount ) inverted_rate = 1 / amount inverted_rate = inverted_rate . quantize ( Decimal ( '.00000000' ) ) price = Price ( commodity = currency , currency = base_currency , date = rate . datetime . date ( ) , value = str ( inverted_rate ) ) have_new_rates = True if have_new_rates : log ( INFO , "Saving new prices..." ) self . book . flush ( ) self . book . save ( ) else : log ( INFO , "No prices imported." ) | Imports the given prices into database . Write operation! |
249,867 | def __get_default_currency ( self ) : if sys . platform == "win32" : def_curr = self . book [ "default-currency" ] = self . __get_default_currency_windows ( ) else : def_curr = self . book [ "default-currency" ] = self . __get_locale_currency ( ) return def_curr | Read the default currency from GnuCash preferences |
249,868 | def __get_registry_key ( self , key ) : import winreg root = winreg . OpenKey ( winreg . HKEY_CURRENT_USER , r'SOFTWARE\GSettings\org\gnucash\general' , 0 , winreg . KEY_READ ) [ pathname , regtype ] = ( winreg . QueryValueEx ( root , key ) ) winreg . CloseKey ( root ) return pathname | Read currency from windows registry |
249,869 | def get_for_accounts ( self , accounts : List [ Account ] ) : account_ids = [ acc . guid for acc in accounts ] query = ( self . query . filter ( Split . account_guid . in_ ( account_ids ) ) ) splits = query . all ( ) return splits | Get all splits for the given accounts |
249,870 | def __get_model_for_portfolio_value ( input_model : PortfolioValueInputModel ) -> PortfolioValueViewModel : result = PortfolioValueViewModel ( ) result . filter = input_model ref_datum = Datum ( ) ref_datum . from_datetime ( input_model . as_of_date ) ref_date = ref_datum . end_of_day ( ) result . stock_rows = [ ] with BookAggregate ( ) as svc : book = svc . book stocks_svc = svc . securities if input_model . stock : symbols = input_model . stock . split ( "," ) stocks = stocks_svc . get_stocks ( symbols ) else : stocks = stocks_svc . get_all ( ) for stock in stocks : row : StockViewModel = portfoliovalue . get_stock_model_from ( book , stock , as_of_date = ref_date ) if row and row . balance > 0 : result . stock_rows . append ( row ) return result | loads the data for portfolio value |
249,871 | def __load_settings ( self ) : file_path = self . file_path try : self . data = json . load ( open ( file_path ) ) except FileNotFoundError : print ( "Could not load" , file_path ) | Load settings from . json file |
249,872 | def file_exists ( self ) -> bool : cfg_path = self . file_path assert cfg_path return path . isfile ( cfg_path ) | Check if the settings file exists or not |
249,873 | def save ( self ) : content = self . dumps ( ) fileutils . save_text_to_file ( content , self . file_path ) | Saves the settings contents |
249,874 | def database_path ( self ) : filename = self . database_filename db_path = ":memory:" if filename == ":memory:" else ( path . abspath ( path . join ( __file__ , "../.." , ".." , "data" , filename ) ) ) return db_path | Full database path . Includes the default location + the database filename . |
249,875 | def file_path ( self ) -> str : user_dir = self . __get_user_path ( ) file_path = path . abspath ( path . join ( user_dir , self . FILENAME ) ) return file_path | Settings file absolute path |
249,876 | def dumps ( self ) -> str : return json . dumps ( self . data , sort_keys = True , indent = 4 ) | Dumps the json content as a string |
249,877 | def __copy_template ( self ) : import shutil template_filename = "settings.json.template" template_path = path . abspath ( path . join ( __file__ , ".." , ".." , "config" , template_filename ) ) settings_path = self . file_path shutil . copyfile ( template_path , settings_path ) self . __ensure_file_exists ( ) | Copy the settings template into the user's directory |
249,878 | def is_not_empty ( self , value , strict = False ) : value = stringify ( value ) if value is not None : return self . shout ( 'Value %r is empty' , strict , value ) | if value is not empty |
249,879 | def is_numeric ( self , value , strict = False ) : value = stringify ( value ) if value is not None : if value . isnumeric ( ) : return self . shout ( 'value %r is not numeric' , strict , value ) | if value is numeric |
249,880 | def is_integer ( self , value , strict = False ) : if value is not None : if isinstance ( value , numbers . Number ) : return value = stringify ( value ) if value is not None and value . isnumeric ( ) : return self . shout ( 'value %r is not an integer' , strict , value ) | if value is an integer |
249,881 | def match_date ( self , value , strict = False ) : value = stringify ( value ) try : parse ( value ) except Exception : self . shout ( 'Value %r is not a valid date' , strict , value ) | if value is a date |
249,882 | def match_regexp ( self , value , q , strict = False ) : value = stringify ( value ) mr = re . compile ( q ) if value is not None : if mr . match ( value ) : return self . shout ( '%r not matching the regexp %r' , strict , value , q ) | if value matches a regexp q |
249,883 | def has_length ( self , value , q , strict = False ) : value = stringify ( value ) if value is not None : if len ( value ) == q : return self . shout ( 'Value %r not matching length %r' , strict , value , q ) | if value has a length of q |
249,884 | def must_contain ( self , value , q , strict = False ) : if value is not None : if value . find ( q ) != - 1 : return self . shout ( 'Value %r does not contain %r' , strict , value , q ) | if value must contain q |
249,885 | def extract ( context , data ) : with context . http . rehash ( data ) as result : file_path = result . file_path content_type = result . content_type extract_dir = random_filename ( context . work_path ) if content_type in ZIP_MIME_TYPES : extracted_files = extract_zip ( file_path , extract_dir ) elif content_type in TAR_MIME_TYPES : extracted_files = extract_tar ( file_path , extract_dir , context ) elif content_type in SEVENZIP_MIME_TYPES : extracted_files = extract_7zip ( file_path , extract_dir , context ) else : context . log . warning ( "Unsupported archive content type: %s" , content_type ) return extracted_content_hashes = { } for path in extracted_files : relative_path = os . path . relpath ( path , extract_dir ) content_hash = context . store_file ( path ) extracted_content_hashes [ relative_path ] = content_hash data [ 'content_hash' ] = content_hash data [ 'file_name' ] = relative_path context . emit ( data = data . copy ( ) ) | Extract a compressed file |
249,886 | def size ( cls , crawler ) : key = make_key ( 'queue_pending' , crawler ) return unpack_int ( conn . get ( key ) ) | Total operations pending for this crawler |
249,887 | def read_word ( image , whitelist = None , chars = None , spaces = False ) : from tesserocr import PyTessBaseAPI api = PyTessBaseAPI ( ) api . SetPageSegMode ( 8 ) if whitelist is not None : api . SetVariable ( "tessedit_char_whitelist" , whitelist ) api . SetImage ( image ) api . Recognize ( ) guess = api . GetUTF8Text ( ) if not spaces : guess = '' . join ( [ c for c in guess if c != " " ] ) guess = guess . strip ( ) if chars is not None and len ( guess ) != chars : return guess , None return guess , api . MeanTextConf ( ) | OCR a single word from an image . Useful for captchas . Image should be pre - processed to remove noise etc . |
249,888 | def read_char ( image , whitelist = None ) : from tesserocr import PyTessBaseAPI api = PyTessBaseAPI ( ) api . SetPageSegMode ( 10 ) if whitelist is not None : api . SetVariable ( "tessedit_char_whitelist" , whitelist ) api . SetImage ( image ) api . Recognize ( ) return api . GetUTF8Text ( ) . strip ( ) | OCR a single character from an image . Useful for captchas . |
249,889 | def get ( self , name , default = None ) : value = self . params . get ( name , default ) if isinstance ( value , str ) : value = os . path . expandvars ( value ) return value | Get a configuration value and expand environment variables . |
249,890 | def emit ( self , rule = 'pass' , stage = None , data = { } , delay = None , optional = False ) : if stage is None : stage = self . stage . handlers . get ( rule ) if optional and stage is None : return if stage is None or stage not in self . crawler . stages : self . log . info ( "No next stage: %s (%s)" % ( stage , rule ) ) return state = self . dump_state ( ) delay = delay or self . crawler . delay Queue . queue ( stage , state , data , delay ) | Invoke the next stage either based on a handling rule or by calling the pass rule by default . |
249,891 | def recurse ( self , data = { } , delay = None ) : return self . emit ( stage = self . stage . name , data = data , delay = delay ) | Have a stage invoke itself with a modified set of arguments . |
249,892 | def execute ( self , data ) : if Crawl . is_aborted ( self . crawler , self . run_id ) : return try : Crawl . operation_start ( self . crawler , self . stage , self . run_id ) self . log . info ( '[%s->%s(%s)]: %s' , self . crawler . name , self . stage . name , self . stage . method_name , self . run_id ) return self . stage . method ( self , data ) except Exception as exc : self . emit_exception ( exc ) finally : Crawl . operation_end ( self . crawler , self . run_id ) shutil . rmtree ( self . work_path ) | Execute the crawler and create a database record of having done so . |
def skip_incremental(self, *criteria):
    """Perform an incremental check on a set of criteria.

    Returns ``True`` when the key derived from ``criteria`` has been
    seen before (the caller may skip the work); otherwise the key is
    recorded and ``False`` is returned. Always ``False`` when
    incremental mode is off or no key can be derived.
    """
    if self.incremental:
        key = make_key(*criteria)
        if key is not None:
            if self.check_tag(key):
                return True
            # First sighting: remember the key for future runs.
            self.set_tag(key, None)
    return False
def store_data(self, data, encoding='utf-8'):
    """Put the given content into a file, possibly encoding it as UTF-8
    in the process.

    The content is written to a temporary file inside the work path,
    handed to ``store_file``, and the temporary file is removed again.
    """
    path = random_filename(self.work_path)
    try:
        if isinstance(data, str):
            data = data.encode(encoding)
        with open(path, 'wb') as fh:
            # ``None`` yields an empty file rather than an error.
            if data is not None:
                fh.write(data)
        return self.store_file(path)
    finally:
        # Best-effort cleanup of the temporary file.
        try:
            os.unlink(path)
        except OSError:
            pass
def check_due(self):
    """Check if the last execution of this crawler is older than the
    scheduled interval.

    Returns ``False`` when the crawler is disabled, already running, or
    has no interval configured; ``True`` when it has never run or the
    interval has elapsed since the last run.
    """
    # Any of these conditions rules out scheduling another run.
    if self.disabled or self.is_running or self.delta is None:
        return False
    previous = self.last_run
    if previous is None:
        # Never executed before: due immediately.
        return True
    return datetime.utcnow() > previous + self.delta
def flush(self):
    """Delete all run-time data generated by this crawler.

    Clears the queue, the event log and the crawl bookkeeping records.
    """
    Queue.flush(self)
    Event.delete(self)
    Crawl.flush(self)
def run(self, incremental=None, run_id=None):
    """Queue the execution of a particular crawler.

    :param incremental: override the global incremental setting.
    :param run_id: optional identifier for this run.
    """
    state = dict(crawler=self.name,
                 run_id=run_id,
                 incremental=settings.INCREMENTAL)
    # An explicit argument beats the global default.
    if incremental is not None:
        state['incremental'] = incremental
    # Clear any previous run-time state before queueing the first stage.
    self.cancel()
    Event.delete(self)
    Queue.queue(self.init_stage, state, {})
def fetch(context, data):
    """Do an HTTP GET on the ``url`` specified in the inbound data.

    Applies the stage's filter rules, emits the serialised result on
    success, and retries with exponential backoff on request errors.
    """
    url = data.get('url')
    # Number of the current attempt; removed from the payload so it is
    # not forwarded to the next stage.
    attempt = data.pop('retry_attempt', 1)
    try:
        result = context.http.get(url, lazy=True)
        # Default rule matches everything unless the stage configures one.
        rules = context.get('rules', {'match_all': {}})
        if not Rule.get_rule(rules).apply(result):
            context.log.info('Fetch skip: %r', result.url)
            return
        if not result.ok:
            err = (result.url, result.status_code)
            context.emit_warning("Fetch fail [%s]: HTTP %s" % err)
            # Failed responses are only forwarded when explicitly enabled.
            if not context.params.get('emit_errors', False):
                return
        else:
            context.log.info("Fetched [%s]: %r", result.status_code, result.url)
        data.update(result.serialize())
        if url != result.url:
            # The request was redirected; tag the original URL as seen
            # for this run so it is not fetched again.
            tag = make_key(context.run_id, url)
            context.set_tag(tag, None)
        context.emit(data=data)
    except RequestException as ce:
        retries = int(context.get('retry', 3))
        if retries >= attempt:
            # Re-queue this stage with exponential backoff (2**attempt).
            context.log.warn("Retry: %s (error: %s)", url, ce)
            data['retry_attempt'] = attempt + 1
            context.recurse(data=data, delay=2 ** attempt)
        else:
            context.emit_warning("Fetch fail [%s]: %s" % (url, ce))
def dav_index(context, data):
    """List files in a WebDAV directory.

    Issues a PROPFIND on the URL in ``data``, recurses into sub-folders
    and fetches plain files directly.
    """
    url = data.get('url')
    result = context.http.request('PROPFIND', url)
    for response in result.xml.findall('./{DAV:}response'):
        href = response.findtext('./{DAV:}href')
        if href is None:
            continue
        child_url = urljoin(url, href)
        # The listing includes the directory itself; skip that entry.
        if child_url == url:
            continue
        child = data.copy()
        child['url'] = child_url
        child['foreign_id'] = child_url
        if response.find('.//{DAV:}collection') is not None:
            # Sub-folder: run this stage again on its contents.
            child['parent_foreign_id'] = child_url
            context.log.info("Fetching contents of folder: %s" % child_url)
            context.recurse(data=child)
        else:
            # Plain file: fetch it directly.
            child['parent_foreign_id'] = url
            fetch(context, child)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.