code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def required_get_and_update(self, name, default=None): '''a wrapper to get_and_update, but if not successful, will print an error and exit. ''' setting = self._get_and_update_setting(name, default=None) if setting in [None, ""]: bot.exit('You must export %s' % name) return setting
a wrapper to get_and_update, but if not successful, will print an error and exit.
def parse_rest_doc(doc): """ Extract the headers, delimiters, and text from reST-formatted docstrings. Parameters ---------- doc: Union[str, None] Returns ------- Dict[str, Section] """ class Section(object): def __init__(self, header=None, body=None): ...
Extract the headers, delimiters, and text from reST-formatted docstrings. Parameters ---------- doc: Union[str, None] Returns ------- Dict[str, Section]
def get_translation_objects(self, request, language_code, obj=None, inlines=True): """ Return all objects that should be deleted when a translation is deleted. This method can yield all QuerySet objects or lists for the objects. """ if obj is not None: # A single mode...
Return all objects that should be deleted when a translation is deleted. This method can yield all QuerySet objects or lists for the objects.
def convert_http_request(request, referrer_host=None): '''Convert a HTTP request. Args: request: An instance of :class:`.http.request.Request`. referrer_host (str): The referrering hostname or IP address. Returns: Request: An instance of :class:`urllib.request.Request` ''' ...
Convert a HTTP request. Args: request: An instance of :class:`.http.request.Request`. referrer_host (str): The referrering hostname or IP address. Returns: Request: An instance of :class:`urllib.request.Request`
def create_scratch_org(self, org_name, config_name, days=None, set_password=True): """ Adds/Updates a scratch org config to the keychain from a named config """ scratch_config = getattr( self.project_config, "orgs__scratch__{}".format(config_name) ) if days is not None: ...
Adds/Updates a scratch org config to the keychain from a named config
def cli( paths, dbname, separator, quoting, skip_errors, replace_tables, table, extract_column, date, datetime, datetime_format, primary_key, fts, index, shape, filename_column, no_index_fks, no_fulltext_fks, ): """ PATHS: paths to individu...
PATHS: paths to individual .csv files or to directories containing .csvs DBNAME: name of the SQLite database file to create
def get_xritdecompress_outfile(stdout): """Analyse the output of the xRITDecompress command call and return the file.""" outfile = b'' for line in stdout: try: k, v = [x.strip() for x in line.split(b':', 1)] except ValueError: break if k == b'Decompressed file...
Analyse the output of the xRITDecompress command call and return the file.
def profile_detail( request, username, template_name=accounts_settings.ACCOUNTS_PROFILE_DETAIL_TEMPLATE, extra_context=None, **kwargs): """ Detailed view of an user. :param username: String of the username of which the profile should be viewed. :param template_name: String ...
Detailed view of an user. :param username: String of the username of which the profile should be viewed. :param template_name: String representing the template name that should be used to display the profile. :param extra_context: Dictionary of variables which should be su...
def get_datetime(self, timestamp: str, unix=True): """Converts a %Y%m%dT%H%M%S.%fZ to a UNIX timestamp or a datetime.datetime object Parameters --------- timestamp: str A timstamp in the %Y%m%dT%H%M%S.%fZ format, usually returned by the API in the ``creat...
Converts a %Y%m%dT%H%M%S.%fZ to a UNIX timestamp or a datetime.datetime object Parameters --------- timestamp: str A timstamp in the %Y%m%dT%H%M%S.%fZ format, usually returned by the API in the ``created_time`` field for example (eg. 20180718T145906.000Z) ...
def generate_index(self, schemas): '''Generates html for an index file''' params = {'schemas': sorted(schemas, key=lambda x: x.object_name), 'project': self.project_name, 'title': '{}: Database schema documentation'\ .format(self.project_name)} ...
Generates html for an index file
def trigger(self, events, *args, **kwargs): """ Fires the given *events* (string or list of strings). All callbacks associated with these *events* will be called and if their respective objects have a *times* value set it will be used to determine when to remove the associated c...
Fires the given *events* (string or list of strings). All callbacks associated with these *events* will be called and if their respective objects have a *times* value set it will be used to determine when to remove the associated callback from the event. If given, callbacks associated ...
def Bankoff(m, x, rhol, rhog, mul, mug, D, roughness=0, L=1): r'''Calculates two-phase pressure drop with the Bankoff (1960) correlation, as shown in [2]_, [3]_, and [4]_. .. math:: \Delta P_{tp} = \phi_{l}^{7/4} \Delta P_{l} .. math:: \phi_l = \frac{1}{1-x}\left[1 - \gamma\left(1 - \f...
r'''Calculates two-phase pressure drop with the Bankoff (1960) correlation, as shown in [2]_, [3]_, and [4]_. .. math:: \Delta P_{tp} = \phi_{l}^{7/4} \Delta P_{l} .. math:: \phi_l = \frac{1}{1-x}\left[1 - \gamma\left(1 - \frac{\rho_g}{\rho_l} \right)\right]^{3/7}\left[1 + x\left(\...
def get_file_uuid(fpath, hasher=None, stride=1): """ Creates a uuid from the hash of a file """ if hasher is None: hasher = hashlib.sha1() # 20 bytes of output #hasher = hashlib.sha256() # 32 bytes of output # sha1 produces a 20 byte hash hashbytes_20 = get_file_hash(fpath, hasher=...
Creates a uuid from the hash of a file
def transform(self, X, perplexity=5, initialization="median", k=25, learning_rate=1, n_iter=100, exaggeration=2, momentum=0): """Embed new points into the existing embedding. This procedure optimizes each point only with respect to the existing embedding i.e. it ignores any in...
Embed new points into the existing embedding. This procedure optimizes each point only with respect to the existing embedding i.e. it ignores any interactions between the points in ``X`` among themselves. Please see the :ref:`parameter-guide` for more information. Parameters ...
def draw_beam(ax, p1, p2, width=0, beta1=None, beta2=None, format=None, **kwds): r"""Draw a laser beam.""" if format is None: format = 'k-' if width == 0: x0 = [p1[0], p2[0]] y0 = [p1[1], p2[1]] ax.plot(x0, y0, format, **kwds) else: a = width/2 ...
r"""Draw a laser beam.
def use_comparative_sequence_rule_enabler_rule_view(self): """Pass through to provider SequenceRuleEnablerRuleLookupSession.use_comparative_sequence_rule_enabler_rule_view""" self._object_views['sequence_rule_enabler_rule'] = COMPARATIVE # self._get_provider_session('sequence_rule_enabler_rule_l...
Pass through to provider SequenceRuleEnablerRuleLookupSession.use_comparative_sequence_rule_enabler_rule_view
def from_gps(cls, gps, Name = None): """ Instantiate a Time element initialized to the value of the given GPS time. The Name attribute will be set to the value of the Name parameter if given. Note: the new Time element holds a reference to the GPS time, not a copy of it. Subsequent modification of the G...
Instantiate a Time element initialized to the value of the given GPS time. The Name attribute will be set to the value of the Name parameter if given. Note: the new Time element holds a reference to the GPS time, not a copy of it. Subsequent modification of the GPS time object will be reflected in what ge...
def heightmap_get_interpolated_value( hm: np.ndarray, x: float, y: float ) -> float: """Return the interpolated height at non integer coordinates. Args: hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions. x (float): A floating point x coordinate. y (float): A floa...
Return the interpolated height at non integer coordinates. Args: hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions. x (float): A floating point x coordinate. y (float): A floating point y coordinate. Returns: float: The value at ``x``, ``y``.
def wait_for_keys_modified(self, *keys, modifiers_to_check=_mod_keys, timeout=0): """The same as wait_for_keys, but returns a frozen_set which contains the pressed key, and the modifier keys. :param modifiers_to_check: iterable of modifiers for which the function...
The same as wait_for_keys, but returns a frozen_set which contains the pressed key, and the modifier keys. :param modifiers_to_check: iterable of modifiers for which the function will check whether they are pressed :type modifiers: Iterable[int]
def basename(path): """Rightmost part of path after separator.""" base_path = path.strip(SEP) sep_ind = base_path.rfind(SEP) if sep_ind < 0: return path return base_path[sep_ind + 1:]
Rightmost part of path after separator.
async def api_bikes(request): """ Gets stolen bikes within a radius of a given postcode. :param request: The aiohttp request. :return: The bikes stolen with the given range from a postcode. """ postcode: Optional[str] = request.match_info.get('postcode', None) try: radius = int(requ...
Gets stolen bikes within a radius of a given postcode. :param request: The aiohttp request. :return: The bikes stolen with the given range from a postcode.
def purchase_time(self): """Date and time of app purchase. :rtype: datetime """ ts = self._iface.get_purchase_time(self.app_id) return datetime.utcfromtimestamp(ts)
Date and time of app purchase. :rtype: datetime
def get_participants(self, namespace, room): """Return an iterable with the active participants in a room.""" for sid, active in six.iteritems(self.rooms[namespace][room].copy()): yield sid
Return an iterable with the active participants in a room.
def monitor(self): """ Access the Monitor Twilio Domain :returns: Monitor Twilio Domain :rtype: twilio.rest.monitor.Monitor """ if self._monitor is None: from twilio.rest.monitor import Monitor self._monitor = Monitor(self) return self._mo...
Access the Monitor Twilio Domain :returns: Monitor Twilio Domain :rtype: twilio.rest.monitor.Monitor
def async_make_reply(msgname, types, arguments_future, major): """Wrap future that will resolve with arguments needed by make_reply().""" arguments = yield arguments_future raise gen.Return(make_reply(msgname, types, arguments, major))
Wrap future that will resolve with arguments needed by make_reply().
def delete(self): """Reverts all files in this changelist then deletes the changelist from perforce""" try: self.revert() except errors.ChangelistError: pass self._connection.run(['change', '-d', str(self._change)])
Reverts all files in this changelist then deletes the changelist from perforce
def visit_For(self, node): """ Handle iterate variable in for loops. >>> import gast as ast >>> from pythran import passmanager, backend >>> node = ast.parse(''' ... def foo(): ... a = b = c = 2 ... for i in __builtin__.range(1): ... a -= ...
Handle iterate variable in for loops. >>> import gast as ast >>> from pythran import passmanager, backend >>> node = ast.parse(''' ... def foo(): ... a = b = c = 2 ... for i in __builtin__.range(1): ... a -= 1 ... b += 1''') ...
def _split(string, splitters): """Splits a string into parts at multiple characters""" part = '' for character in string: if character in splitters: yield part part = '' else: part += character yield part
Splits a string into parts at multiple characters
def noise_op(latents, hparams): """Adds isotropic gaussian-noise to each latent. Args: latents: 4-D or 5-D tensor, shape=(NTHWC) or (NHWC). hparams: HParams. Returns: latents: latents with isotropic gaussian noise appended. """ if hparams.latent_noise == 0 or hparams.mode != tf.estimator.ModeKeys...
Adds isotropic gaussian-noise to each latent. Args: latents: 4-D or 5-D tensor, shape=(NTHWC) or (NHWC). hparams: HParams. Returns: latents: latents with isotropic gaussian noise appended.
def parallel_assimilate(self, rootpath): """ Assimilate the entire subdirectory structure in rootpath. """ logger.info('Scanning for valid paths...') valid_paths = [] for (parent, subdirs, files) in os.walk(rootpath): valid_paths.extend(self._drone.get_valid_p...
Assimilate the entire subdirectory structure in rootpath.
def start(self): """ Given the pipeline topology starts ``Pipers`` in the order input -> output. See ``Piper.start``. ``Pipers`` instances are started in two stages, which allows them to share ``NuMaps``. """ # top - > bottom of pipeline pipers = self.p...
Given the pipeline topology starts ``Pipers`` in the order input -> output. See ``Piper.start``. ``Pipers`` instances are started in two stages, which allows them to share ``NuMaps``.
def patch(self, overrides): """ Patches the config with the given overrides. Example: If the current dictionary looks like this: a: 1, b: { c: 3, d: 4 } and `patch` is called with the following overrides: b: { ...
Patches the config with the given overrides. Example: If the current dictionary looks like this: a: 1, b: { c: 3, d: 4 } and `patch` is called with the following overrides: b: { d: 2, e: 4 }, c: 5 ...
def main(): """ Main entry point. """ import jsonschema parser = argparse.ArgumentParser(version=__version__) parser.add_argument( '--verbose', action='store_true', default=False, help='Turn on verbose output.') parser.add_argument( '--header', action='store_true', de...
Main entry point.
def join_syllables_spaces(syllables: List[str], spaces: List[int]) -> str: """ Given a list of syllables, and a list of integers indicating the position of spaces, return a string that has a space inserted at the designated points. :param syllables: :param spaces: :return: >>> join_syllabl...
Given a list of syllables, and a list of integers indicating the position of spaces, return a string that has a space inserted at the designated points. :param syllables: :param spaces: :return: >>> join_syllables_spaces(["won", "to", "tree", "dun"], [3, 6, 11]) 'won to tree dun'
def lookup(self, domain_name, validate=True): """ Lookup an existing SimpleDB domain. This differs from :py:meth:`get_domain` in that ``None`` is returned if ``validate`` is ``True`` and no match was found (instead of raising an exception). :param str domain_name: The name of t...
Lookup an existing SimpleDB domain. This differs from :py:meth:`get_domain` in that ``None`` is returned if ``validate`` is ``True`` and no match was found (instead of raising an exception). :param str domain_name: The name of the domain to retrieve :param bool validate: If ``...
def load_all_csvs_to_model(path, model, field_names=None, delimiter=None, batch_len=10000, dialect=None, num_header_rows=1, mode='rUb', strip=True, clear=False, dry_run=True, ignore_errors=True, sort_files=True, recursive=False, ext='', ...
Bulk create database records from all csv files found within a directory.
def create_cells(self, blocks): """Turn the list of blocks into a list of notebook cells.""" cells = [] for block in blocks: if (block['type'] == self.code) and (block['IO'] == 'input'): code_cell = self.create_code_cell(block) cells.append(code_cell) ...
Turn the list of blocks into a list of notebook cells.
def _elapsed(self): """ Returns elapsed time at update. """ self.last_time = time.time() return self.last_time - self.start
Returns elapsed time at update.
def add_done_callback(self, fn): """Add a callback to be executed when the operation is complete. If the operation is not already complete, this will start a helper thread to poll for the status of the operation in the background. Args: fn (Callable[Future]): The callback t...
Add a callback to be executed when the operation is complete. If the operation is not already complete, this will start a helper thread to poll for the status of the operation in the background. Args: fn (Callable[Future]): The callback to execute when the operation ...
def blast_representative_sequence_to_pdb(self, seq_ident_cutoff=0, evalue=0.0001, display_link=False, outdir=None, force_rerun=False): """BLAST the representative protein sequence to the PDB. Saves a raw BLAST result file (XML file). Args: seq_id...
BLAST the representative protein sequence to the PDB. Saves a raw BLAST result file (XML file). Args: seq_ident_cutoff (float, optional): Cutoff results based on percent coverage (in decimal form) evalue (float, optional): Cutoff for the E-value - filters for significant hits. 0.001 is ...
def is_installable_file(path): # type: (PipfileType) -> bool """Determine if a path can potentially be installed""" from packaging import specifiers if isinstance(path, Mapping): path = convert_entry_to_path(path) # If the string starts with a valid specifier operator, test if it is a vali...
Determine if a path can potentially be installed
def _send_command(self, cmd, expect=None): """Send a command to MPlayer. cmd: the command string expect: expect the output starts with a certain string The result, if any, is returned as a string. """ if not self.is_alive: raise NotPlayingError() logg...
Send a command to MPlayer. cmd: the command string expect: expect the output starts with a certain string The result, if any, is returned as a string.
def assign_value(rs, data_type, val, unit_id, name, metadata={}, data_hash=None, user_id=None, source=None): """ Insert or update a piece of data in a scenario. If the dataset is being shared by other resource scenarios, a new dataset is inserted. If the dataset is ONLY bein...
Insert or update a piece of data in a scenario. If the dataset is being shared by other resource scenarios, a new dataset is inserted. If the dataset is ONLY being used by the resource scenario in question, the dataset is updated to avoid unnecessary duplication.
def get_supplier_properties_per_page(self, per_page=1000, page=1, params=None): """ Get supplier properties per page :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :param params: Search parameters. Default: {} :return: list ...
Get supplier properties per page :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :param params: Search parameters. Default: {} :return: list
def ekf_ok(self): """ ``True`` if the EKF status is considered acceptable, ``False`` otherwise (``boolean``). """ # legacy check for dronekit-python for solo # use same check that ArduCopter::system.pde::position_ok() is using if self.armed: return self._ekf_p...
``True`` if the EKF status is considered acceptable, ``False`` otherwise (``boolean``).
def send(self, s): """ Send data to the channel. Returns the number of bytes sent, or 0 if the channel stream is closed. Applications are responsible for checking that all data has been sent: if only some of the data was transmitted, the application needs to attempt delivery of...
Send data to the channel. Returns the number of bytes sent, or 0 if the channel stream is closed. Applications are responsible for checking that all data has been sent: if only some of the data was transmitted, the application needs to attempt delivery of the remaining data. :...
def resort(self, attributeID, isAscending=None): """Sort by one of my specified columns, identified by attributeID """ if isAscending is None: isAscending = self.defaultSortAscending newSortColumn = self.columns[attributeID] if newSortColumn.sortAttribute() is None: ...
Sort by one of my specified columns, identified by attributeID
def replace_namespaced_limit_range(self, name, namespace, body, **kwargs): """ replace the specified LimitRange This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_limit_rang...
replace the specified LimitRange This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_limit_range(name, namespace, body, async_req=True) >>> result = thread.get() :param asyn...
def jira_connection(config): """ Gets a JIRA API connection. If a connection has already been created the existing connection will be returned. """ global _jira_connection if _jira_connection: return _jira_connection else: jira_options = {'server': config.get('jira').get('ur...
Gets a JIRA API connection. If a connection has already been created the existing connection will be returned.
def is_empty(self): """ Test interval emptiness. :return: True if interval is empty, False otherwise. """ return ( self._lower > self._upper or (self._lower == self._upper and (self._left == OPEN or self._right == OPEN)) )
Test interval emptiness. :return: True if interval is empty, False otherwise.
def split(self, text): """Split text into a list of cells.""" import re if re.search('\n\n', text): return text.split('\n\n') elif re.search('\r\n\r\n', text): return text.split('\r\n\r\n') else: LOGGER.error("'%s' does not appear to be a 'srt...
Split text into a list of cells.
def reporter(self): """ Create a .csv file with the strain name, and the number of core genes present/the total number of core genes """ with open(os.path.join(self.reportpath, 'Escherichia_core.csv'), 'w') as report: data = 'Strain,Genes Present/Total\n' for samp...
Create a .csv file with the strain name, and the number of core genes present/the total number of core genes
def value(source, key, ext=COMPLETE): """Extracts value for the specified metadata key from the given extension set. Keyword arguments: source -- string containing MultiMarkdown text ext -- extension bitfield for processing text key -- key to extract """ _MMD_LIB.extract_metadata_value.rest...
Extracts value for the specified metadata key from the given extension set. Keyword arguments: source -- string containing MultiMarkdown text ext -- extension bitfield for processing text key -- key to extract
def wysiwyg_setup(protocol="http", editor_override=None): """ Create the <style> and <script> tags needed to initialize the rich text editor. Create a local django_wysiwyg/includes.html template if you don't want to use Yahoo's CDN """ ctx = { "protocol": protocol, } ctx.update(get...
Create the <style> and <script> tags needed to initialize the rich text editor. Create a local django_wysiwyg/includes.html template if you don't want to use Yahoo's CDN
def add_update_resources(self, resources, ignore_datasetid=False): # type: (List[Union[hdx.data.resource.Resource,Dict,str]], bool) -> None """Add new or update existing resources with new metadata to the dataset Args: resources (List[Union[hdx.data.resource.Resource,Dict,str]]): A ...
Add new or update existing resources with new metadata to the dataset Args: resources (List[Union[hdx.data.resource.Resource,Dict,str]]): A list of either resource ids or resources metadata from either Resource objects or dictionaries ignore_datasetid (bool): Whether to ignore dataset i...
def _init_objgoea(self, pop, assoc): """Run gene ontology enrichment analysis (GOEA).""" propagate_counts = not self.args.no_propagate_counts return GOEnrichmentStudy(pop, assoc, self.godag, propagate_counts=propagate_counts, rela...
Run gene ontology enrichment analysis (GOEA).
def evaluate(self, dataset, metric="auto", missing_value_action='auto', with_predictions=False, options={}, **kwargs): """ Evaluate the model by making predictions of target values and comparing these to actual values. Parameters ---------- dataset : SFr...
Evaluate the model by making predictions of target values and comparing these to actual values. Parameters ---------- dataset : SFrame Dataset in the same format used for training. The columns names and types of the dataset must be the same as that used in traini...
def _clean_features(struct): """Cleans up the features collected in parse_play_details. :struct: Pandas Series of features parsed from details string. :returns: the same dict, but with cleaner features (e.g., convert bools, ints, etc.) """ struct = dict(struct) # First, clean up play type b...
Cleans up the features collected in parse_play_details. :struct: Pandas Series of features parsed from details string. :returns: the same dict, but with cleaner features (e.g., convert bools, ints, etc.)
def retry(n, errors, wait=0.0, logger_name=None): """This is a decorator that retries a function. Tries `n` times and catches a given tuple of `errors`. If the `n` retries are not enough, the error is reraised. If desired `waits` some seconds. Optionally takes a 'logger_name' of a given logger t...
This is a decorator that retries a function. Tries `n` times and catches a given tuple of `errors`. If the `n` retries are not enough, the error is reraised. If desired `waits` some seconds. Optionally takes a 'logger_name' of a given logger to print the caught error.
def deploy_func_between_two_axis_partitions( cls, axis, func, num_splits, len_of_left, kwargs, *partitions ): """Deploy a function along a full axis between two data sets in Ray. Args: axis: The axis to perform the function along. func: The function to perform. ...
Deploy a function along a full axis between two data sets in Ray. Args: axis: The axis to perform the function along. func: The function to perform. num_splits: The number of splits to return (see `split_result_of_axis_func_pandas`). len_of_left: ...
def _realToVisibleColumn(self, text, realColumn): """If \t is used, real position of symbol in block and visible position differs This function converts real to visible """ generator = self._visibleCharPositionGenerator(text) for i in range(realColumn): val = next(gen...
If \t is used, real position of symbol in block and visible position differs This function converts real to visible
def flushTable(self, login, tableName, startRow, endRow, wait): """ Parameters: - login - tableName - startRow - endRow - wait """ self.send_flushTable(login, tableName, startRow, endRow, wait) self.recv_flushTable()
Parameters: - login - tableName - startRow - endRow - wait
def xml2dict(root): """Use functions instead of Class and remove namespace based on: http://stackoverflow.com/questions/2148119 """ output = {} if root.items(): output.update(dict(root.items())) for element in root: if element: if len(element) == 1 or element[0].tag ...
Use functions instead of Class and remove namespace based on: http://stackoverflow.com/questions/2148119
def run_script(scriptfile): '''run a script file''' try: f = open(scriptfile, mode='r') except Exception: return mpstate.console.writeln("Running script %s" % scriptfile) sub = mp_substitute.MAVSubstitute() for line in f: line = line.strip() if line == "" or line....
run a script file
def _bss_image_crit(s_true, e_spat, e_interf, e_artif): """Measurement of the separation quality for a given image in terms of filtered true source, spatial error, interference and artifacts. """ # energy ratios sdr = _safe_db(np.sum(s_true**2), np.sum((e_spat+e_interf+e_artif)**2)) isr = _safe_...
Measurement of the separation quality for a given image in terms of filtered true source, spatial error, interference and artifacts.
def pending_batch_info(self): """Returns a tuple of the current size of the pending batch queue and the current queue limit. """ c_length = ctypes.c_int(0) c_limit = ctypes.c_int(0) self._call( 'pending_batch_info', ctypes.byref(c_length), ...
Returns a tuple of the current size of the pending batch queue and the current queue limit.
def Cpu(): """ Get number of available CPUs """ cpu = 'Unknown' try: cpu = str(multiprocessing.cpu_count()) except Exception as e: # pragma: no cover logger.error("Can't access CPU count' " + str(e)) return cpu
Get number of available CPUs
def _mudraw(buffer, fmt): """Use mupdf draw to rasterize the PDF in the memory buffer""" with NamedTemporaryFile(suffix='.pdf') as tmp_in: tmp_in.write(buffer) tmp_in.seek(0) tmp_in.flush() proc = run( ['mudraw', '-F', fmt, '-o', '-', tmp_in.name], stdout=PIPE, stder...
Use mupdf draw to rasterize the PDF in the memory buffer
def encode_varint(value, write): """ Encode an integer to a varint presentation. See https://developers.google.com/protocol-buffers/docs/encoding?csw=1#varints on how those can be produced. Arguments: value (int): Value to encode write (function): Called per byte that needs ...
Encode an integer to a varint presentation. See https://developers.google.com/protocol-buffers/docs/encoding?csw=1#varints on how those can be produced. Arguments: value (int): Value to encode write (function): Called per byte that needs to be writen Returns: ...
def load_site_config(name): """Load and return site configuration as a dict.""" return _load_config_json( os.path.join( CONFIG_PATH, CONFIG_SITES_PATH, name + CONFIG_EXT ) )
Load and return site configuration as a dict.
def _add_scope_decorations(self, block, start, end): """ Show a scope decoration on the editor widget :param start: Start line :param end: End line """ try: parent = FoldScope(block).parent() except ValueError: parent = None if Tex...
Show a scope decoration on the editor widget :param start: Start line :param end: End line
def importpath(path, error_text=None): """ Import value by specified ``path``. Value can represent module, class, object, attribute or method. If ``error_text`` is not None and import will raise ImproperlyConfigured with user friendly text. """ result = None attrs = [] parts = path....
Import value by specified ``path``. Value can represent module, class, object, attribute or method. If ``error_text`` is not None and import will raise ImproperlyConfigured with user friendly text.
def _show_prompt(self, prompt=None, html=False, newline=True): """ Writes a new prompt at the end of the buffer. Parameters ---------- prompt : str, optional The prompt to show. If not specified, the previous prompt is used. html : bool, optional (default False) ...
Writes a new prompt at the end of the buffer. Parameters ---------- prompt : str, optional The prompt to show. If not specified, the previous prompt is used. html : bool, optional (default False) Only relevant when a prompt is specified. If set, the prompt will ...
def send_frame(self, frame): ''' Queue a frame for sending. Will send immediately if there are no pending synchronous transactions on this connection. ''' if self.closed: if self.close_info and len(self.close_info['reply_text']) > 0: raise ChannelClos...
Queue a frame for sending. Will send immediately if there are no pending synchronous transactions on this connection.
def vertex_colors(self, values): """ Set the colors for each vertex of a mesh This will apply these colors and delete any previously specified color information. Parameters ------------ colors: (len(mesh.vertices), 3), set each face to the color ...
Set the colors for each vertex of a mesh This will apply these colors and delete any previously specified color information. Parameters ------------ colors: (len(mesh.vertices), 3), set each face to the color (len(mesh.vertices), 4), set each face to the color ...
def _irc_upper(self, in_string): """Convert us to our upper-case equivalent, given our std.""" conv_string = self._translate(in_string) if self._upper_trans is not None: conv_string = in_string.translate(self._upper_trans) return str.upper(conv_string)
Convert us to our upper-case equivalent, given our std.
def inject_closure_values(func, **kwargs): """ Returns a new function identical to the previous one except that it acts as though global variables named in `kwargs` have been closed over with the values specified in the `kwargs` dictionary. Works on properties, class/static methods and functions. ...
Returns a new function identical to the previous one except that it acts as though global variables named in `kwargs` have been closed over with the values specified in the `kwargs` dictionary. Works on properties, class/static methods and functions. This can be useful for mocking and other nefarious ...
async def on_raw_329(self, message): """ Channel creation time. """ target, channel, timestamp = message.params if not self.in_channel(channel): return self.channels[channel]['created'] = datetime.datetime.fromtimestamp(int(timestamp))
Channel creation time.
def fast_int( x, key=lambda x: x, _uni=unicodedata.digit, _first_char=POTENTIAL_FIRST_CHAR, ): """ Convert a string to a int quickly, return input as-is if not possible. We don't need to accept all input that the real fast_int accepts because natsort is controlling what is passed to thi...
Convert a string to a int quickly, return input as-is if not possible. We don't need to accept all input that the real fast_int accepts because natsort is controlling what is passed to this function. Parameters ---------- x : str String to attempt to convert to an int. key : callable ...
def max_freq(self, tech_in_nm=130, ffoverhead=None): """ Estimates the max frequency of a block in MHz. :param tech_in_nm: the size of the circuit technology to be estimated (for example, 65 is 65nm and 250 is 0.25um) :param ffoverhead: setup and ff propagation delay in picoseconds ...
Estimates the max frequency of a block in MHz. :param tech_in_nm: the size of the circuit technology to be estimated (for example, 65 is 65nm and 250 is 0.25um) :param ffoverhead: setup and ff propagation delay in picoseconds :return: a number representing an estimate of the max fre...
def get_subject_with_file_validation(jwt_bu64, cert_path):
    """Extract the subject from a JWT, validating with a cert from disk.

    Same as get_subject_with_local_validation() except that the signing
    certificate is deserialized from the PEM file at ``cert_path`` instead
    of being passed in as an object.
    """
    return get_subject_with_local_validation(
        jwt_bu64, d1_common.cert.x509.deserialize_pem_file(cert_path)
    )
Same as get_subject_with_local_validation() except that the signing certificate is read from a local PEM file.
def decode_index_value(self, index, value): """ Decodes a secondary index value into the correct Python type. :param index: the name of the index :type index: str :param value: the value of the index entry :type value: str :rtype str or int """ if...
Decodes a secondary index value into the correct Python type. :param index: the name of the index :type index: str :param value: the value of the index entry :type value: str :rtype str or int
def _add(self, hostport): """Creates a peer from the hostport and adds it to the peer heap""" peer = self.peer_class( tchannel=self.tchannel, hostport=hostport, on_conn_change=self._update_heap, ) peer.rank = self.rank_calculator.get_rank(peer) ...
Creates a peer from the hostport and adds it to the peer heap
def getSkeletalBoneDataCompressed(self, action, eMotionRange, pvCompressedData, unCompressedSize): """ Reads the state of the skeletal bone data in a compressed form that is suitable for sending over the network. The required buffer size will never exceed ( sizeof(VR_BoneTransform_t)*boneCount +...
Reads the state of the skeletal bone data in a compressed form that is suitable for sending over the network. The required buffer size will never exceed ( sizeof(VR_BoneTransform_t)*boneCount + 2). Usually the size will be much smaller.
def search(self, q, **kw): """Search Gnip for given query, returning deserialized response.""" url = '{base_url}/search/{stream}'.format(**vars(self)) params = { 'q': q, } params.update(self.params) params.update(kw) response = self.session.get(url, ...
Search Gnip for given query, returning deserialized response.
def http_post(url, data=None, opt=opt_default):
    """POST marshalled *data* to *url* and return the response body.

    Thin convenience wrapper around _http_request() (urlopen + read).
    TODO: grow a timeout option at some point.
    """
    payload = _marshalled(data)
    return _http_request(url, method='POST', data=payload, opt=opt)
Shortcut for urlopen (POST) + read. We'll probably want to add a nice timeout here later too.
def _get_error_values(self, startingPercentage, endPercentage, startDate, endDate): """Gets the defined subset of self._errorValues. Both parameters will be correct at this time. :param float startingPercentage: Defines the start of the interval. This has to be a value in [0.0, 100.0]. ...
Gets the defined subset of self._errorValues. Both parameters will be correct at this time. :param float startingPercentage: Defines the start of the interval. This has to be a value in [0.0, 100.0]. It represents the value, where the error calculation should be started. 25.0 f...
def utc_datetime(dt=None, local_value=True): """ Convert local datetime and/or datetime without timezone information to UTC datetime with timezone information. :param dt: local datetime to convert. If is None, then system datetime value is used :param local_value: whether dt is a datetime in system timezone or UTC...
Convert local datetime and/or datetime without timezone information to UTC datetime with timezone information. :param dt: local datetime to convert. If is None, then system datetime value is used :param local_value: whether dt is a datetime in system timezone or UTC datetime without timezone information :return: d...
def _preflight_check(desired, fromrepo, **kwargs): ''' Perform platform-specific checks on desired packages ''' if 'pkg.check_db' not in __salt__: return {} ret = {'suggest': {}, 'no_suggest': []} pkginfo = __salt__['pkg.check_db']( *list(desired.keys()), fromrepo=fromrepo, **kwa...
Perform platform-specific checks on desired packages
def bulk_upsert(self, docs, namespace, timestamp): """Update or insert multiple documents into Solr docs may be any iterable """ if self.auto_commit_interval is not None: add_kwargs = { "commit": (self.auto_commit_interval == 0), "commitWithin...
Update or insert multiple documents into Solr docs may be any iterable
def get_processors(processor_cat, prop_defs, data_attr=None): """ reads the prop defs and adds applicable processors for the property Args: processor_cat(str): The category of processors to retreive prop_defs: property defintions as defined by the rdf defintions data_attr: the attr to m...
reads the prop defs and adds applicable processors for the property Args: processor_cat(str): The category of processors to retrieve prop_defs: property definitions as defined by the rdf definitions data_attr: the attr to manipulate during processing. Returns: list: a list of pro...
def delete_table_rate_rule_by_id(cls, table_rate_rule_id, **kwargs): """Delete TableRateRule Delete an instance of TableRateRule by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.dele...
Delete TableRateRule Delete an instance of TableRateRule by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.delete_table_rate_rule_by_id(table_rate_rule_id, async=True) >>> result = th...
def load_dsdl(*paths, **args): """ Loads the DSDL files under the given directory/directories, and creates types for each of them in the current module's namespace. If the exclude_dist argument is not present, or False, the DSDL definitions installed with this package will be loaded first. Als...
Loads the DSDL files under the given directory/directories, and creates types for each of them in the current module's namespace. If the exclude_dist argument is not present, or False, the DSDL definitions installed with this package will be loaded first. Also adds entries for all datatype (ID, kind)s...
def acquireConnection(self): """ Get a Connection instance. Parameters: ---------------------------------------------------------------- retval: A ConnectionWrapper instance. NOTE: Caller is responsible for calling the ConnectionWrapper instance's release(...
Get a Connection instance. Parameters: ---------------------------------------------------------------- retval: A ConnectionWrapper instance. NOTE: Caller is responsible for calling the ConnectionWrapper instance's release() method or use it in a context manag...
def run_example(example_coroutine, *extra_args): """Run a hangups example coroutine. Args: example_coroutine (coroutine): Coroutine to run with a connected hangups client and arguments namespace as arguments. extra_args (str): Any extra command line arguments required by the ...
Run a hangups example coroutine. Args: example_coroutine (coroutine): Coroutine to run with a connected hangups client and arguments namespace as arguments. extra_args (str): Any extra command line arguments required by the example.
def end(self): """Get or set the end of the event. | Will return an :class:`Arrow` object. | May be set to anything that :func:`Arrow.get` understands. | If set to a non null value, removes any already existing duration. | Setting to None will have unexpected beh...
Get or set the end of the event. | Will return an :class:`Arrow` object. | May be set to anything that :func:`Arrow.get` understands. | If set to a non null value, removes any already existing duration. | Setting to None will have unexpected behavior if begin...
def _rectangles_to_polygons(df): """ Convert rect data to polygons Paramters --------- df : dataframe Dataframe with *xmin*, *xmax*, *ymin* and *ymax* columns, plus others for aesthetics ... Returns ------- data : dataframe Dataframe with *x* and *y* columns, pl...
Convert rect data to polygons Parameters ---------- df : dataframe Dataframe with *xmin*, *xmax*, *ymin* and *ymax* columns, plus others for aesthetics ... Returns ------- data : dataframe Dataframe with *x* and *y* columns, plus others for aesthetics ...
def _hook_xfer_mem(self, uc, access, address, size, value, data): """ Handle memory operations from unicorn. """ assert access in (UC_MEM_WRITE, UC_MEM_READ, UC_MEM_FETCH) if access == UC_MEM_WRITE: self._cpu.write_int(address, value, size * 8) # If client c...
Handle memory operations from unicorn.
def delete_collection_namespaced_service_account(self, namespace, **kwargs): """ delete collection of ServiceAccount This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namesp...
delete collection of ServiceAccount This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_service_account(namespace, async_req=True) >>> result = thread.get() :param...
def do_random(context, seq):
    """Return a uniformly random element of *seq*.

    An empty sequence yields an Undefined object from the environment
    rather than raising.
    """
    try:
        choice = random.choice(seq)
    except IndexError:
        return context.environment.undefined('No random item, sequence was empty.')
    return choice
Return a random item from the sequence.