code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def import_libs(self, module_names, impl_name): """ Loop through module_names, add has_.... booleans to class set ..._impl to first successful import :param module_names: list of module names to try importing :param impl_name: used in error output if no modules succeed...
Loop through module_names, add has_.... booleans to class set ..._impl to first successful import :param module_names: list of module names to try importing :param impl_name: used in error output if no modules succeed :return: name, module from first successful implementation
def get_default_value(self): """ Return the default value for the parameter. If here is no default value, return None """ if ('default_value' in self.attributes and bool(self.attributes['default_value'].strip())): return self.attributes['default_value'] ...
Return the default value for the parameter. If here is no default value, return None
def refactor(self, symbol, value): """ Args: symbol: value: Returns: None """ if value: self.pset.add(symbol) else: self.pset.remove(symbol)
Args: symbol: value: Returns: None
def start(host='localhost', port=61613, username='', password=''): """Start twisted event loop and the fun should begin... """ StompClientFactory.username = username StompClientFactory.password = password reactor.connectTCP(host, port, StompClientFactory()) reactor.run()
Start twisted event loop and the fun should begin...
def get_leafs(self, name): r""" Get the sub-tree leaf node(s). :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* no...
r""" Get the sub-tree leaf node(s). :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree)
def combine_pdf_as_bytes(pdfs: List[BytesIO]) -> bytes: """Combine PDFs and return a byte-string with the result. Arguments --------- pdfs A list of BytesIO representations of PDFs """ writer = PdfWriter() for pdf in pdfs: writer.addpages(PdfReader(pdf).pages) bio = Byt...
Combine PDFs and return a byte-string with the result. Arguments --------- pdfs A list of BytesIO representations of PDFs
def ratio_value_number_to_time_series_length(self, x): """ As in tsfresh `ratio_value_number_to_time_series_length <https://github.com/blue-yonder/tsfresh/blob/master\ /tsfresh/feature_extraction/feature_calculators.py#L830>`_ Returns a factor which is 1 if all values in the...
As in tsfresh `ratio_value_number_to_time_series_length <https://github.com/blue-yonder/tsfresh/blob/master\ /tsfresh/feature_extraction/feature_calculators.py#L830>`_ Returns a factor which is 1 if all values in the time series occur only once, and below one if this is not the case...
def requires_public_key(func): """ Decorator for functions that require the public key to be defined. By definition, this includes the private key, as such, it's enough to use this to effect definition of both public and private key. """ def func_wrapper(self, *args, **kwargs): if hasattr(self,...
Decorator for functions that require the public key to be defined. By definition, this includes the private key, as such, it's enough to use this to effect definition of both public and private key.
def footprints_from_point(point, distance, footprint_type='building', retain_invalid=False): """ Get footprints within some distance north, south, east, and west of a lat-long point. Parameters ---------- point : tuple a lat-long point distance : numeric distance in meters ...
Get footprints within some distance north, south, east, and west of a lat-long point. Parameters ---------- point : tuple a lat-long point distance : numeric distance in meters footprint_type : string type of footprint to be downloaded. OSM tag key e.g. 'building', 'land...
def _jx_expression(expr, lang): """ WRAP A JSON EXPRESSION WITH OBJECT REPRESENTATION """ if is_expression(expr): # CONVERT TO lang new_op = lang[expr.id] if not new_op: # CAN NOT BE FOUND, TRY SOME PARTIAL EVAL return language[expr.id].partial_eval() ...
WRAP A JSON EXPRESSION WITH OBJECT REPRESENTATION
def vdp_vlan_change_internal(self, vsw_cb_data, vdp_vlan, fail_reason): """Callback Function from VDP when provider VLAN changes. This will be called only during error cases when switch reloads or when compute reloads. """ LOG.debug("In VDP VLAN change VLAN %s", vdp_vlan) ...
Callback Function from VDP when provider VLAN changes. This will be called only during error cases when switch reloads or when compute reloads.
def _put_file(self, file): """Send PUT request to S3 with file contents""" post_params = { 'file_size': file.size, 'file_hash': file.md5hash(), 'content_type': self._get_content_type(file), } headers = self._request_headers('PUT', file.prefixed_name...
Send PUT request to S3 with file contents
def reduce(self, colors): """Converts color codes into optimized text This optimizer works by merging adjacent colors so we don't have to repeat the same escape codes for each pixel. There is no loss of information. :param colors: Iterable yielding an xterm color code for each...
Converts color codes into optimized text This optimizer works by merging adjacent colors so we don't have to repeat the same escape codes for each pixel. There is no loss of information. :param colors: Iterable yielding an xterm color code for each pixel, None t...
def as_graph(self) -> Digraph: # pragma: no cover """Renders the discrimination net as graphviz digraph.""" if Digraph is None: raise ImportError('The graphviz package is required to draw the graph.') dot = Digraph() nodes = set() queue = [self._root] while ...
Renders the discrimination net as graphviz digraph.
def get_adjacency_matrix(self, fmt='coo'): r""" Returns an adjacency matrix in the specified sparse format, with 1's indicating the non-zero values. Parameters ---------- fmt : string, optional The sparse storage format to return. Options are: *...
r""" Returns an adjacency matrix in the specified sparse format, with 1's indicating the non-zero values. Parameters ---------- fmt : string, optional The sparse storage format to return. Options are: **'coo'** : (default) This is the native format of O...
def list_variables(self): """ List available variables and applies any filters. """ station_codes = self._get_station_codes() station_codes = self._apply_features_filter(station_codes) variables = self._list_variables(station_codes) if hasattr(self, "_variables")...
List available variables and applies any filters.
def create_extended_model(model, db_penalty=None, ex_penalty=None, tp_penalty=None, penalties=None): """Create an extended model for gap-filling. Create a :class:`psamm.metabolicmodel.MetabolicModel` with all reactions added (the reaction database in the model is taken to be t...
Create an extended model for gap-filling. Create a :class:`psamm.metabolicmodel.MetabolicModel` with all reactions added (the reaction database in the model is taken to be the universal database) and also with artificial exchange and transport reactions added. Return the extended :class:`psamm.meta...
def sort_by_speedup(self, reverse=True): """Sort the configurations in place. items with highest speedup come first""" self._confs.sort(key=lambda c: c.speedup, reverse=reverse) return self
Sort the configurations in place. items with highest speedup come first
def _getHead(self, branch): """Return a deferred for branch head revision or None. We'll get an error if there is no head for this branch, which is probably a good thing, since it's probably a misspelling (if really buildbotting a branch that does not have any changeset yet, one...
Return a deferred for branch head revision or None. We'll get an error if there is no head for this branch, which is probably a good thing, since it's probably a misspelling (if really buildbotting a branch that does not have any changeset yet, one shouldn't be surprised to get errors)
def _run_events(self, tag, stage=None): """Run tests marked with a particular tag and stage""" self._run_event_methods(tag, stage) self._run_tests(tag, stage)
Run tests marked with a particular tag and stage
def search(self, query_string, **kwargs): """ The main search method :param query_string: The string to pass to Elasticsearch. e.g. '*:*' :param kwargs: start_offset, end_offset, result_class :return: result_class instance """ self.index_name = self._index_name_fo...
The main search method :param query_string: The string to pass to Elasticsearch. e.g. '*:*' :param kwargs: start_offset, end_offset, result_class :return: result_class instance
def weeks_per_year(year): '''Number of ISO weeks in a year''' # 53 weeks: any year starting on Thursday and any leap year starting on Wednesday jan1 = jwday(gregorian.to_jd(year, 1, 1)) if jan1 == THU or (jan1 == WED and isleap(year)): return 53 else: return 52
Number of ISO weeks in a year
def _process_op_err(self, e): """ Process errors which occured while reading or parsing the protocol. If allow_reconnect is enabled it will try to switch the server to which it is currently connected otherwise it will disconnect. """ if self.is_connecting or self....
Process errors which occured while reading or parsing the protocol. If allow_reconnect is enabled it will try to switch the server to which it is currently connected otherwise it will disconnect.
def RawData(self): """Yields the valus in each section.""" result = collections.OrderedDict() i = 0 while True: try: name, value, value_type = winreg.EnumValue(self._AccessRootKey(), i) # Only support strings here. if value_type == winreg.REG_SZ: precondition.Ass...
Yields the valus in each section.
def top_k_logits(logits, k): """ Masks everything but the k top entries as -infinity (1e10). Used to mask logits such that e^-infinity -> 0 won't contribute to the sum of the denominator. """ if k == 0: return logits else: values = torch.topk(logits, k)[0] batch_mins ...
Masks everything but the k top entries as -infinity (1e10). Used to mask logits such that e^-infinity -> 0 won't contribute to the sum of the denominator.
def _get_notify_msg_and_payload(result, stream): """Get notify message and payload dict""" token = stream.advance_past_chars(["=", "*"]) token = int(token) if token != "" else None logger.debug("%s", fmt_green("parsing message")) message = stream.advance_past_chars([","]) logger.debug("parsed m...
Get notify message and payload dict
def ExportNEP2(self, passphrase): """ Export the encrypted private key in NEP-2 format. Args: passphrase (str): The password to encrypt the private key with, as unicode string Returns: str: The NEP-2 encrypted private key """ if len(passphrase) <...
Export the encrypted private key in NEP-2 format. Args: passphrase (str): The password to encrypt the private key with, as unicode string Returns: str: The NEP-2 encrypted private key
def sg_float(tensor, opt): r"""Casts a tensor to floatx. See `tf.cast()` in tensorflow. Args: tensor: A `Tensor` or `SparseTensor` (automatically given by chain). opt: name : If provided, it replaces current tensor's name Returns: A `Tensor` or `SparseTensor` with same s...
r"""Casts a tensor to floatx. See `tf.cast()` in tensorflow. Args: tensor: A `Tensor` or `SparseTensor` (automatically given by chain). opt: name : If provided, it replaces current tensor's name Returns: A `Tensor` or `SparseTensor` with same shape as `tensor`.
def submit(args): gpus = args.gpus.strip().split(',') """Submit function of local jobs.""" def mthread_submit(nworker, nserver, envs): """ customized submit script, that submit nslave jobs, each must contain args as parameter note this can be a lambda function containing additional p...
Submit function of local jobs.
def get_playlist_songs(self, playlist_id, limit=1000): """Get a playlists's all songs. :params playlist_id: playlist id. :params limit: length of result returned by weapi. :return: a list of Song object. """ url = 'http://music.163.com/weapi/v3/playlist/detail?csrf_toke...
Get a playlists's all songs. :params playlist_id: playlist id. :params limit: length of result returned by weapi. :return: a list of Song object.
def Approval(self, username, approval_id): """Returns a reference to an approval.""" return ClientApprovalRef( client_id=self.client_id, username=username, approval_id=approval_id, context=self._context)
Returns a reference to an approval.
def unicode_urlencode(query, doseq=True): """ Custom wrapper around urlencode to support unicode Python urlencode doesn't handle unicode well so we need to convert to bytestrings before using it: http://stackoverflow.com/questions/6480723/urllib-urlencode-doesnt-like-unicode-values-how-about-this-w...
Custom wrapper around urlencode to support unicode Python urlencode doesn't handle unicode well so we need to convert to bytestrings before using it: http://stackoverflow.com/questions/6480723/urllib-urlencode-doesnt-like-unicode-values-how-about-this-workaround
def clean_global_runtime_state(reset_subsystem=False): """Resets the global runtime state of a pants runtime for cleaner forking. :param bool reset_subsystem: Whether or not to clean Subsystem global state. """ if reset_subsystem: # Reset subsystem state. Subsystem.reset() # Reset Goals and Tasks. ...
Resets the global runtime state of a pants runtime for cleaner forking. :param bool reset_subsystem: Whether or not to clean Subsystem global state.
def reasonable_desired_version(self, desired_version, allow_equal=False, allow_patch_skip=False): """ Determine whether the desired version is a reasonable next version. Parameters ---------- desired_version: str the proposed next ve...
Determine whether the desired version is a reasonable next version. Parameters ---------- desired_version: str the proposed next version name
def record(self): # type: () -> bytes ''' A method to generate a string representing this El Torito Entry. Parameters: None. Returns: String representing this El Torito Entry. ''' if not self._initialized: raise pycdlibexception.PyCd...
A method to generate a string representing this El Torito Entry. Parameters: None. Returns: String representing this El Torito Entry.
def verify_constraints(constraints): """ Verify values returned from :meth:`make_constraints`. Used internally during the :meth:`build` process. :param constraints: value returned from :meth:`make_constraints` :type constraints: :class:`list` :raises ValueError: if veri...
Verify values returned from :meth:`make_constraints`. Used internally during the :meth:`build` process. :param constraints: value returned from :meth:`make_constraints` :type constraints: :class:`list` :raises ValueError: if verification fails
def _fetch_and_parse_messages(self, mailing_list, from_date): """Fetch and parse the messages from a mailing list""" from_date = datetime_to_utc(from_date) nmsgs, imsgs, tmsgs = (0, 0, 0) for mbox in mailing_list.mboxes: tmp_path = None try: tm...
Fetch and parse the messages from a mailing list
def read_interoperability_ifd(fh, byteorder, dtype, count, offsetsize): """Read Interoperability tags from file and return as dict.""" tag_names = {1: 'InteroperabilityIndex'} return read_tags(fh, byteorder, offsetsize, tag_names, maxifds=1)
Read Interoperability tags from file and return as dict.
def make_while_loop(test_and_body_instrs, else_body_instrs, context): """ Make an ast.While node. Parameters ---------- test_and_body_instrs : deque Queue of instructions forming the loop test expression and body. else_body_instrs : deque Queue of instructions forming the else b...
Make an ast.While node. Parameters ---------- test_and_body_instrs : deque Queue of instructions forming the loop test expression and body. else_body_instrs : deque Queue of instructions forming the else block of the loop. context : DecompilationContext
def set_aromatic(self): """set the cycle to be an aromatic ring""" #XXX FIX ME # this probably shouldn't be here for atom in self.atoms: atom.aromatic = 1 for bond in self.bonds: bond.aromatic = 1 bond.bondorder = 1.5 b...
set the cycle to be an aromatic ring
def MessageSetItemDecoder(extensions_by_number): """Returns a decoder for a MessageSet item. The parameter is the _extensions_by_number map for the message class. The message set message looks like this: message MessageSet { repeated group Item = 1 { required int32 type_id = 2; require...
Returns a decoder for a MessageSet item. The parameter is the _extensions_by_number map for the message class. The message set message looks like this: message MessageSet { repeated group Item = 1 { required int32 type_id = 2; required string message = 3; } }
def _fetchAllChildren(self): """ Fetches all sub groups and variables that this group contains. """ assert self._h5Group is not None, "dataset undefined (file not opened?)" assert self.canFetchChildren(), "canFetchChildren must be True" childItems = [] for childName, h5...
Fetches all sub groups and variables that this group contains.
def _schema_nodes(self): """parse self._ontology_file into a graph""" name, ext = os.path.splitext(self._ontology_file) if ext in ['.ttl']: self._ontology_parser_function = \ lambda s: rdflib.Graph().parse(s, format='n3') else: self._ontology_parse...
parse self._ontology_file into a graph
def get(self, sid): """ Constructs a MessageContext :param sid: The unique string that identifies the resource :returns: twilio.rest.chat.v2.service.channel.message.MessageContext :rtype: twilio.rest.chat.v2.service.channel.message.MessageContext """ return Mess...
Constructs a MessageContext :param sid: The unique string that identifies the resource :returns: twilio.rest.chat.v2.service.channel.message.MessageContext :rtype: twilio.rest.chat.v2.service.channel.message.MessageContext
def liste_stations(self, station=None, detail=False): """ Liste des stations Paramètres: station : un nom de station valide (si vide, liste toutes les stations) detail : si True, affiche plus de détail sur la (les) station(s). """ condition = "" if stati...
Liste des stations Paramètres: station : un nom de station valide (si vide, liste toutes les stations) detail : si True, affiche plus de détail sur la (les) station(s).
def _wrap_paginated_response(cls, request, response, controls, data, head=None): """Builds the metadata for a pagingated response and wraps everying in a JSON encoded web.Response """ paging_response = response['paging'] if head is None: ...
Builds the metadata for a pagingated response and wraps everying in a JSON encoded web.Response
def plot_polynomial( log, title, polynomialDict, orginalDataDictionary=False, pathToOutputPlotsFolder="~/Desktop", xRange=False, xlabel=False, ylabel=False, xAxisLimits=False, yAxisLimits=False, yAxisInvert=False, prependNum...
*Plot a dictionary of numpy lightcurves polynomials* **Key Arguments:** - ``log`` -- logger - ``title`` -- title for the plot - ``polynomialDict`` -- dictionary of polynomials { label01 : poly01, label02 : poly02 } - ``orginalDataDictionary`` -- the orginal data points {name: [x, y]...
def update_progress_bar( go, optext, start, total_files, files_sofar, total_bytes, bytes_sofar, stdin_upload=False): # type: (blobxfer.models.options.General, str, datetime.datetime, int, # int, int, int, bool) -> None """Update the progress bar :param blobxfer.models.options.Gene...
Update the progress bar :param blobxfer.models.options.General go: general options :param str optext: operation prefix text :param datetime.datetime start: start time :param int total_files: total number of files :param int files_sofar: files transfered so far :param int total_bytes: total numbe...
def import_file_object(filename): """ Summary: Imports block filesystem object Args: :filename (str): block filesystem object Returns: dictionary obj (valid json file), file data object """ try: handle = open(filename, 'r') file_obj = handle.read() ...
Summary: Imports block filesystem object Args: :filename (str): block filesystem object Returns: dictionary obj (valid json file), file data object
def downloadMARCXML(doc_id, library, base="nkc"): """ Download MARC XML document with given `doc_id` from given `library`. Args: doc_id (DocumentID): You will get this from :func:`getDocumentIDs`. library (str): "``NKC01``" in our case, but don't worry, :func:`getDocument...
Download MARC XML document with given `doc_id` from given `library`. Args: doc_id (DocumentID): You will get this from :func:`getDocumentIDs`. library (str): "``NKC01``" in our case, but don't worry, :func:`getDocumentIDs` adds library specification into :class...
def get_caller_module(): """ Returns the name of the caller's module as a string. >>> get_caller_module() '__main__' """ stack = inspect.stack() assert len(stack) > 1 caller = stack[2][0] return caller.f_globals['__name__']
Returns the name of the caller's module as a string. >>> get_caller_module() '__main__'
def paginate(self, request, offset=0, limit=None): """Paginate queryset.""" return self.collection.offset(offset).limit(limit), self.collection.count()
Paginate queryset.
def rename(self, **mapping): """ The rename method allows stream parameters to be allocated to new names to avoid clashes with other stream parameters of the same name. Returns a new clone of the stream instance with the specified name mapping. """ params = {k: v ...
The rename method allows stream parameters to be allocated to new names to avoid clashes with other stream parameters of the same name. Returns a new clone of the stream instance with the specified name mapping.
def identify_missing(self, df, check_start=True): """ Identify missing data. Parameters ---------- df : pd.DataFrame() Dataframe to check for missing data. check_start : bool turns 0 to 1 for the first observation, to display the start of...
Identify missing data. Parameters ---------- df : pd.DataFrame() Dataframe to check for missing data. check_start : bool turns 0 to 1 for the first observation, to display the start of the data as the beginning of the missing data event ...
def clear_text(self): """stub""" if (self.get_text_metadata().is_read_only() or self.get_text_metadata().is_required()): raise NoAccess() self.my_osid_object_form._my_map['text'] = \ dict(self.get_text_metadata().get_default_string_values()[0])
stub
def add_px_err(isoel, col1, col2, px_um, inplace=False): """Undo pixelation correction Isoelasticity lines are already corrected for pixelation effects as described in Mapping of Deformation to Apparent Young's Modulus in Real-Time Deformability Cytometry Christoph Hero...
Undo pixelation correction Isoelasticity lines are already corrected for pixelation effects as described in Mapping of Deformation to Apparent Young's Modulus in Real-Time Deformability Cytometry Christoph Herold, arXiv:1704.00572 [cond-mat.soft] (2017) https://arxiv.or...
def preorder_iter(self, filter_fn=None): """ From DendroPy Preorder traversal of self and its child_nodes. Returns self and all descendants such that a node is returned before its child_nodes (and their child_nodes). Filtered by filter_fn: node is only returned if no filter_fn i...
From DendroPy Preorder traversal of self and its child_nodes. Returns self and all descendants such that a node is returned before its child_nodes (and their child_nodes). Filtered by filter_fn: node is only returned if no filter_fn is given or if filter_fn returns True.
def get_el_sp(obj): """ Utility method to get an Element or Specie from an input obj. If obj is in itself an element or a specie, it is returned automatically. If obj is an int or a string representing an integer, the Element with the atomic number obj is returned. If obj is a string, Specie par...
Utility method to get an Element or Specie from an input obj. If obj is in itself an element or a specie, it is returned automatically. If obj is an int or a string representing an integer, the Element with the atomic number obj is returned. If obj is a string, Specie parsing will be attempted (e.g., Mn...
def queues(self, page=None, per_page=None, previous=None, prefix=None): """Execute an HTTP request to get a list of queues and return it. Keyword arguments: page -- The 0-based page to get queues from. Defaults to None, which omits the parameter. """ options = {}...
Execute an HTTP request to get a list of queues and return it. Keyword arguments: page -- The 0-based page to get queues from. Defaults to None, which omits the parameter.
def foreach_(ctx, seq, expr): ''' Yields the result of applying an expression to each item in the input sequence. * seq: input sequence * expr: expression to be converted to string, then dynamically evaluated for each item on the sequence to produce the result ''' from . import context, parse a...
Yields the result of applying an expression to each item in the input sequence. * seq: input sequence * expr: expression to be converted to string, then dynamically evaluated for each item on the sequence to produce the result
def decode_varint_1(buffer, pos=0): """ Decode an integer from a varint presentation. See https://developers.google.com/protocol-buffers/docs/encoding?csw=1#varints on how those can be produced. Arguments: buffer (bytes-like): any object acceptable by ``memoryview`` pos (int...
Decode an integer from a varint presentation. See https://developers.google.com/protocol-buffers/docs/encoding?csw=1#varints on how those can be produced. Arguments: buffer (bytes-like): any object acceptable by ``memoryview`` pos (int): optional position to read from R...
def filter_by(zips=_zips, **kwargs): """ Use `kwargs` to select for desired attributes from list of zipcode dicts """ return [z for z in zips if all([k in z and z[k] == v for k, v in kwargs.items()])]
Use `kwargs` to select for desired attributes from list of zipcode dicts
def fillDataProducts(self, dps): """Fills listview with existing data products""" item = None for dp in dps: if not dp.ignored: item = self._makeDPItem(self, dp, item) # ensure combobox widgets are made self._itemComboBox(item, self.Col...
Fills listview with existing data products
def layout(request, ident, stateless=False, cache_id=None, **kwargs): 'Return the layout of the dash application' _, app = DashApp.locate_item(ident, stateless) view_func = app.locate_endpoint_function('dash-layout') resp = view_func() initial_arguments = get_initial_arguments(request, cache_id) ...
Return the layout of the dash application
def _get_wv(sentence, ignore=False): ''' get word2vec data by sentence sentence is segmented string. ''' global _vectors vectors = [] for y in sentence: y_ = any2unicode(y).strip() if y_ not in _stopwords: syns = nearby(y_)[0] # print("sentence %s word...
get word2vec data by sentence sentence is segmented string.
def parse(self, limit=None): """ Override Source.parse() Args: :param limit (int, optional) limit the number of rows processed Returns: :return None """ if limit is not None: LOG.info("Only parsing first %d rows", limit) rgd_fi...
Override Source.parse() Args: :param limit (int, optional) limit the number of rows processed Returns: :return None
def createEditor(self, delegate, parent, option): """ Creates a ColorCtiEditor. For the parameters see the AbstractCti constructor documentation. """ return ColorCtiEditor(self, delegate, parent=parent)
Creates a ColorCtiEditor. For the parameters see the AbstractCti constructor documentation.
def list_images(self): """Gets Docker image list. :returns: list of dicts :rtype: list """ images = [] for image in (yield from self.query("GET", "images/json", params={"all": 0})): if image['RepoTags']: for tag in image['RepoTags']: ...
Gets Docker image list. :returns: list of dicts :rtype: list
def _evolve_reader(in_file): """Generate a list of region IDs and trees from a top_k_trees evolve.py file. """ cur_id_list = None cur_tree = None with open(in_file) as in_handle: for line in in_handle: if line.startswith("id,"): if cur_id_list: ...
Generate a list of region IDs and trees from a top_k_trees evolve.py file.
async def send_data(self, data, addr): """ Send data to a remote host via the TURN server. """ channel = self.peer_to_channel.get(addr) if channel is None: channel = self.channel_number self.channel_number += 1 self.channel_to_peer[channel] = a...
Send data to a remote host via the TURN server.
def match_bitap(self, text, pattern, loc): """Locate the best instance of 'pattern' in 'text' near 'loc' using the Bitap algorithm. Args: text: The text to search. pattern: The pattern to search for. loc: The location to search around. Returns: Best match index or -1. """ ...
Locate the best instance of 'pattern' in 'text' near 'loc' using the Bitap algorithm. Args: text: The text to search. pattern: The pattern to search for. loc: The location to search around. Returns: Best match index or -1.
def format_results(self, results): """ Format the ldap results object into somthing that is reasonable """ if not results: return None userdn = results[0][0] userobj = results[0][1] userobj['dn'] = userdn keymap = self.config.get('KEY_MAP') ...
Format the ldap results object into somthing that is reasonable
def get_hstwcs(filename,hdulist,extnum): """ Return the HSTWCS object for a given chip. """ hdrwcs = wcsutil.HSTWCS(hdulist,ext=extnum) hdrwcs.filename = filename hdrwcs.expname = hdulist[extnum].header['expname'] hdrwcs.extver = hdulist[extnum].header['extver'] return hdrwcs
Return the HSTWCS object for a given chip.
def _get_nets_krnic(self, *args, **kwargs): """ Deprecated. This will be removed in a future release. """ from warnings import warn warn('NIRWhois._get_nets_krnic() has been deprecated and will be ' 'removed. You should now use NIRWhois.get_nets_krnic().') r...
Deprecated. This will be removed in a future release.
def filetree(self): """ :attr:`files` as a dictionary tree Each node is a ``dict`` that maps directory/file names to child nodes. Each child node is a ``dict`` for directories and ``None`` for files. If :attr:`path` is ``None``, this is an empty ``dict``. """ tr...
:attr:`files` as a dictionary tree Each node is a ``dict`` that maps directory/file names to child nodes. Each child node is a ``dict`` for directories and ``None`` for files. If :attr:`path` is ``None``, this is an empty ``dict``.
def jsonnummultby(self, name, path, number): """ Multiplies the numeric (integer or floating point) JSON value under ``path`` at key ``name`` with the provided ``number`` """ return self.execute_command('JSON.NUMMULTBY', name, str_path(path), self._encode(number))
Multiplies the numeric (integer or floating point) JSON value under ``path`` at key ``name`` with the provided ``number``
def do_read(self, args): """read <addr> ( <objid> ( <prop> [ <indx> ] )... )...""" args = args.split() if _debug: ReadPropertyMultipleConsoleCmd._debug("do_read %r", args) try: i = 0 addr = args[i] i += 1 read_access_spec_list = [] ...
read <addr> ( <objid> ( <prop> [ <indx> ] )... )...
def array2tree(arr, name='tree', tree=None): """Convert a numpy structured array into a ROOT TTree. Fields of basic types, strings, and fixed-size subarrays of basic types are supported. ``np.object`` and ``np.float16`` are currently not supported. Parameters ---------- arr : array A n...
Convert a numpy structured array into a ROOT TTree. Fields of basic types, strings, and fixed-size subarrays of basic types are supported. ``np.object`` and ``np.float16`` are currently not supported. Parameters ---------- arr : array A numpy structured array name : str (optional, defa...
def prov(self): """ :return: This bundle's provenance :rtype: :py:class:`prov.model.ProvDocument` """ if not self._prov: self._prov = self._api.get_bundle(self._document.id, self._id) return self._prov
:return: This bundle's provenance :rtype: :py:class:`prov.model.ProvDocument`
def update_rtfilters(self): """Updates RT filters for each peer. Should be called if a new RT Nlri's have changed based on the setting. Currently only used by `Processor` to update the RT filters after it has processed a RT destination. If RT filter has changed for a peer we cal...
Updates RT filters for each peer. Should be called if a new RT Nlri's have changed based on the setting. Currently only used by `Processor` to update the RT filters after it has processed a RT destination. If RT filter has changed for a peer we call RT filter change handler.
def get_token_by_code(self, code): '''return origin json''' url = 'https://openapi.youku.com/v2/oauth2/token' data = {'client_id': self.client_id, 'client_secret': self.client_secret, 'grant_type': 'authorization_code', 'code': code, ...
return origin json
def fetch_weeks(self, weeks, overwrite=False):
    """Fetch and cache the requested weeks.

    :param weeks: iterable of ``(year, week)`` pairs to retrieve
    :param overwrite: passed through to ``ElasticsearchFetcher.fetch``
        (presumably forces a re-fetch of cached weeks — confirm)
    """
    fetcher = ElasticsearchFetcher(self.store, self.config)
    for year, week in weeks:
        # Progress marker for long-running fetches.
        print("Fetch {}-{}".format(year, week))
        fetcher.fetch(year, week, overwrite)
def delete_router(self, router):
    '''
    Delete the specified router
    '''
    resolved_id = self._find_router_id(router)
    result = self.network_conn.delete_router(router=resolved_id)
    # A falsy result from the SDK call (e.g. None) is reported as success.
    return result or True
def case_insensitive(self): """Matching packages distinguish between uppercase and lowercase """ if "--case-ins" in self.flag: data_dict = Utils().case_sensitive(self.data) for key, value in data_dict.iteritems(): if key == self.name.lower(): ...
Matching packages distinguish between uppercase and lowercase
def execute_system_command(arg, **_): """Execute a system shell command.""" usage = "Syntax: system [command].\n" if not arg: return [(None, None, None, usage)] try: command = arg.strip() if command.startswith('cd'): ok, error_message = handle_cd_command(arg) ...
Execute a system shell command.
def print_pack(document_loader, # type: Loader processobj, # type: CommentedMap uri, # type: Text metadata # type: Dict[Text, Any] ): # type (...) -> Text """Return a CWL serialization of the CWL document in JSON.""" packed...
Return a CWL serialization of the CWL document in JSON.
def get_limit_action(self, criticity, stat_name=""): """Return the tuple (action, repeat) for the alert. - action is a command line - repeat is a bool """ # Get the action for stat + header # Exemple: network_wlan0_rx_careful_action # Action key available ? ...
Return the tuple (action, repeat) for the alert. - action is a command line - repeat is a bool
def tryLoadingFrom(tryPath,moduleName='swhlab'): """if the module is in this path, load it from the local folder.""" if not 'site-packages' in swhlab.__file__: print("loaded custom swhlab module from", os.path.dirname(swhlab.__file__)) return # no need to warn if it's already outsi...
if the module is in this path, load it from the local folder.
def linearRegression(requestContext, seriesList, startSourceAt=None, endSourceAt=None): """ Graphs the liner regression function by least squares method. Takes one metric or a wildcard seriesList, followed by a quoted string with the time to start the line and another quoted string...
Graphs the liner regression function by least squares method. Takes one metric or a wildcard seriesList, followed by a quoted string with the time to start the line and another quoted string with the time to end the line. The start and end times are inclusive (default range is from to until). See ``fro...
def save_image(self, img, filename=None, **kwargs): # floating_point=False, """Save the image to the given *filename* in ninjotiff_ format. .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ filename = filename or self.get_filename(*...
Save the image to the given *filename* in ninjotiff_ format. .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html
def create_token_for_user(user: get_user_model()) -> bytes:
    """
    Create a new random auth token for user.
    """
    # 48 random bytes from the OS CSPRNG; only a hash of it is persisted.
    raw_token = urandom(48)
    hashed = AuthToken._hash_token(raw_token)
    AuthToken.objects.create(hashed_token=hashed, user=user)
    return raw_token
def _find_by_sha1(self, sha1): """ Return an |ImagePart| object belonging to this package or |None| if no matching image part is found. The image part is identified by the SHA1 hash digest of the image binary it contains. """ for image_part in self: # ---skip ...
Return an |ImagePart| object belonging to this package or |None| if no matching image part is found. The image part is identified by the SHA1 hash digest of the image binary it contains.
def dijkstra(G, start, weight='weight'): """ Compute shortest path length between satrt and all other reachable nodes for a weight graph. return -> ({vertex: weight form start, }, {vertex: predeseccor, }) """ if start not in G.vertices: raise GraphInsertError("Vertex %s doesn't ex...
Compute shortest path length between satrt and all other reachable nodes for a weight graph. return -> ({vertex: weight form start, }, {vertex: predeseccor, })
def plot_report(report, success_name, fail_names, label=None, is_max_confidence=True, linewidth=LINEWIDTH, plot_upper_bound=True): """ Plot a success fail curve from a confidence report :param report: A confidence report (the type of object saved by make_confide...
Plot a success fail curve from a confidence report :param report: A confidence report (the type of object saved by make_confidence_report.py) :param success_name: see plot_report_from_path :param fail_names: see plot_report_from_path :param label: see plot_report_from_path :param is_max_confidence: see pl...
def longestorf(args): """ %prog longestorf fastafile Find longest ORF for each sequence in fastafile. """ p = OptionParser(longestorf.__doc__) p.add_option("--ids", action="store_true", help="Generate table with ORF info [default: %default]") opts, args = p.parse_args(args)...
%prog longestorf fastafile Find longest ORF for each sequence in fastafile.
def get_bins(self): """Gets the bin list resulting from the search. return: (osid.resource.BinList) - the bin list raise: IllegalState - list already retrieved *compliance: mandatory -- This method must be implemented.* """ if self.retrieved: raise errors.I...
Gets the bin list resulting from the search. return: (osid.resource.BinList) - the bin list raise: IllegalState - list already retrieved *compliance: mandatory -- This method must be implemented.*
def resolve(input, representation, resolvers=None, get3d=False, **kwargs): """Resolve input to the specified output representation. :param string input: Chemical identifier to resolve :param string representation: Desired output representation :param list(string) resolvers: (Optional) Ordered list of r...
Resolve input to the specified output representation. :param string input: Chemical identifier to resolve :param string representation: Desired output representation :param list(string) resolvers: (Optional) Ordered list of resolvers to use :param bool get3d: (Optional) Whether to return 3D coordinates...
def _cast(cls, base_info, take_ownership=True): """Casts a GIBaseInfo instance to the right sub type. The original GIBaseInfo can't have ownership. Will take ownership. """ type_value = base_info.type.value try: new_obj = cast(base_info, cls.__types[type_val...
Casts a GIBaseInfo instance to the right sub type. The original GIBaseInfo can't have ownership. Will take ownership.
def request_handler(self, can_handle_func): # type: (Callable[[Input], bool]) -> Callable """Decorator that can be used to add request handlers easily to the builder. The can_handle_func has to be a Callable instance, which takes a single parameter and no varargs or kwargs. This...
Decorator that can be used to add request handlers easily to the builder. The can_handle_func has to be a Callable instance, which takes a single parameter and no varargs or kwargs. This is because of the RequestHandler class signature restrictions. The returned wrapper function...
def add_lifecycle_set_storage_class_rule(self, storage_class, **kw): """Add a "delete" rule to lifestyle rules configured for this bucket. See https://cloud.google.com/storage/docs/lifecycle and https://cloud.google.com/storage/docs/json_api/v1/buckets .. literalinclude:: snippets...
Add a "delete" rule to lifestyle rules configured for this bucket. See https://cloud.google.com/storage/docs/lifecycle and https://cloud.google.com/storage/docs/json_api/v1/buckets .. literalinclude:: snippets.py :start-after: [START add_lifecycle_set_storage_class_rule] ...