code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def featurecounts_stats_table(self): """ Take the parsed stats from the featureCounts report and add them to the basic stats table at the top of the report """ headers = OrderedDict() headers['percent_assigned'] = { 'title': '% Assigned', 'description': '% Assign...
Take the parsed stats from the featureCounts report and add them to the basic stats table at the top of the report
def argument_kind(args): # type: (List[Argument]) -> Optional[str] """Return the kind of an argument, based on one or more descriptions of the argument. Return None if every item does not have the same kind. """ kinds = set(arg.kind for arg in args) if len(kinds) != 1: return None r...
Return the kind of an argument, based on one or more descriptions of the argument. Return None if every item does not have the same kind.
def slackbuild(self, name, sbo_file):
    """Download and return the text of a SlackBuild file from the SBo URL."""
    url = self.sbo_url + name + sbo_file
    return URL(url).reading()
Read SlackBuild file
def _import_protobuf_from_file(grpc_pyfile, method_name, service_name = None): """ helper function which try to import method from the given _pb2_grpc.py file service_name should be provided only in case of name conflict return (False, None) in case of failure return (True, (stub_class, request_cla...
helper function which try to import method from the given _pb2_grpc.py file service_name should be provided only in case of name conflict return (False, None) in case of failure return (True, (stub_class, request_class, response_class)) in case of success
def get(self, request): """ GET /consent/api/v1/data_sharing_consent?username=bob&course_id=id&enterprise_customer_uuid=uuid *username* The edX username from whom to get consent. *course_id* The course for which consent is granted. *enterprise_customer_uui...
GET /consent/api/v1/data_sharing_consent?username=bob&course_id=id&enterprise_customer_uuid=uuid *username* The edX username from whom to get consent. *course_id* The course for which consent is granted. *enterprise_customer_uuid* The UUID of the enterprise cu...
def _parse_proc_mount(self): """Parse /proc/mounts""" """ cgroup /cgroup/cpu cgroup rw,relatime,cpuacct,cpu,release_agent=/sbin/cgroup_clean 0 0 cgroup /cgroup/memory cgroup rw,relatime,memory 0 0 cgroup /cgroup/blkio cgroup rw,relatime,blkio 0 0 cgroup /cgroup/freezer c...
Parse /proc/mounts
def _is_and_or_ternary(node): """ Returns true if node is 'condition and true_value or false_value' form. All of: condition, true_value and false_value should not be a complex boolean expression """ return ( isinstance(node, astroid.BoolOp) and node.op ==...
Returns true if node is 'condition and true_value or false_value' form. All of: condition, true_value and false_value should not be a complex boolean expression
def sort_layout(thread, listfile, column=0): """ Sort the syntelog table according to chromomomal positions. First orient the contents against threadbed, then for contents not in threadbed, insert to the nearest neighbor. """ from jcvi.formats.base import DictFile outfile = listfile.rsplit(...
Sort the syntelog table according to chromosomal positions. First orient the contents against threadbed, then for contents not in threadbed, insert to the nearest neighbor.
def construct_formset(self): """ Returns an instance of the formset """ formset_class = self.get_formset() if hasattr(self, 'get_extra_form_kwargs'): klass = type(self).__name__ raise DeprecationWarning( 'Calling {0}.get_extra_form_kwargs i...
Returns an instance of the formset
def list_patterns(refresh=False, root=None): ''' List all known patterns from available repos. refresh force a refresh if set to True. If set to False (default) it depends on zypper if a refresh is executed. root operate on a different root directory. CLI Examples:...
List all known patterns from available repos. refresh force a refresh if set to True. If set to False (default) it depends on zypper if a refresh is executed. root operate on a different root directory. CLI Examples: .. code-block:: bash salt '*' pkg.list_pat...
def attrs(self): """provide a copy of this player's attributes as a dictionary""" ret = dict(self.__dict__) # obtain copy of internal __dict__ del ret["_matches"] # match history is specifically distinguished from player information (and stored separately) if self.type != c.COMPUTER: # d...
provide a copy of this player's attributes as a dictionary
def parse(expected, query): """ Parse query parameters. :type expected: `dict` mapping `bytes` to `callable` :param expected: Mapping of query argument names to argument parsing callables. :type query: `dict` mapping `bytes` to `list` of `bytes` :param query: Mapping of query argumen...
Parse query parameters. :type expected: `dict` mapping `bytes` to `callable` :param expected: Mapping of query argument names to argument parsing callables. :type query: `dict` mapping `bytes` to `list` of `bytes` :param query: Mapping of query argument names to lists of argument values, ...
def _linux_brdel(br):
    '''
    Internal helper: remove bridge ``br`` with brctl via salt's cmd.run.
    '''
    cmd = '{0} delbr {1}'.format(_tool_path('brctl'), br)
    return __salt__['cmd.run'](cmd, python_shell=False)
Internal, deletes the bridge
def reset(self):
    """
    Invoke `reset` on every Preprocessor held by this object.

    Returns:
        A list of tensors to be fetched (concatenation of each
        processor's reset result; processors returning a falsy value
        contribute nothing).
    """
    collected = []
    for processor in self.preprocessors:
        result = processor.reset()
        if result:
            collected.extend(result)
    return collected
Calls `reset` on all our Preprocessor objects. Returns: A list of tensors to be fetched.
def load_blotter_args(blotter_name=None, logger=None): """ Load running blotter's settings (used by clients) :Parameters: blotter_name : str Running Blotter's name (defaults to "auto-detect") logger : object Logger to be use (defaults to Blotter's) :Returns: ...
Load running blotter's settings (used by clients) :Parameters: blotter_name : str Running Blotter's name (defaults to "auto-detect") logger : object Logger to be use (defaults to Blotter's) :Returns: args : dict Running Blotter's arguments
def nlargest(n, mapping): """ Takes a mapping and returns the n keys associated with the largest values in descending order. If the mapping has fewer than n items, all its keys are returned. Equivalent to: ``next(zip(*heapq.nlargest(mapping.items(), key=lambda x: x[1])))`` Returns ...
Takes a mapping and returns the n keys associated with the largest values in descending order. If the mapping has fewer than n items, all its keys are returned. Equivalent to: ``next(zip(*heapq.nlargest(mapping.items(), key=lambda x: x[1])))`` Returns ------- list of up to n keys from ...
def get_edge_type(self, edge_type): """Returns all edges with the specified edge type. Parameters ---------- edge_type : int An integer specifying what type of edges to return. Returns ------- out : list of 2-tuples A list of 2-tuples rep...
Returns all edges with the specified edge type. Parameters ---------- edge_type : int An integer specifying what type of edges to return. Returns ------- out : list of 2-tuples A list of 2-tuples representing the edges in the graph wi...
def get_hdulist_idx(self, ccdnum): """ The SourceCutout is a list of HDUs, this method returns the index of the HDU that corresponds to the given ccd number. CCDs are numbers from 0, but the first CCD (CCDNUM=0) is often in extension 1 of an MEF. @param ccdnum: the number of the CCD in ...
The SourceCutout is a list of HDUs, this method returns the index of the HDU that corresponds to the given ccd number. CCDs are numbers from 0, but the first CCD (CCDNUM=0) is often in extension 1 of an MEF. @param ccdnum: the number of the CCD in the MEF that is being referenced. @return: the ...
def plot_cdf(fignum, data, xlab, sym, title, **kwargs): """ Makes a plot of the cumulative distribution function. Parameters __________ fignum : matplotlib figure number data : list of data to be plotted - doesn't need to be sorted sym : matplotlib symbol for plotting, e.g., 'r--' for a red dash...
Makes a plot of the cumulative distribution function. Parameters __________ fignum : matplotlib figure number data : list of data to be plotted - doesn't need to be sorted sym : matplotlib symbol for plotting, e.g., 'r--' for a red dashed line **kwargs : optional dictionary with {'color': color...
def parse_django_adminopt_node(env, sig, signode): """A copy of sphinx.directives.CmdoptionDesc.parse_signature()""" from sphinx.domains.std import option_desc_re count = 0 firstname = '' for m in option_desc_re.finditer(sig): optname, args = m.groups() if count: signode ...
A copy of sphinx.directives.CmdoptionDesc.parse_signature()
def prime(self, key, value): # type: (Hashable, Any) -> DataLoader """ Adds the provied key and value to the cache. If the key already exists, no change is made. Returns itself for method chaining. """ cache_key = self.get_cache_key(key) # Only add the key if it ...
Adds the provided key and value to the cache. If the key already exists, no change is made. Returns itself for method chaining.
def content(self): """以处理过的Html代码形式返回答案内容. :return: 答案内容 :rtype: str """ answer_wrap = self.soup.find('div', id='zh-question-answer-wrap') content = answer_wrap.find('div', class_='zm-editable-content') content = answer_content_process(content) return con...
以处理过的Html代码形式返回答案内容. :return: 答案内容 :rtype: str
def enable(self):
    """(Re)enable the cache, logging the state change."""
    logger.debug('enable()')
    self.options.enabled = True
    logger.info('cache enabled')
(Re)enable the cache
def fail(message, exception_data=None):
    """Write a failure message to stderr and exit with status 1.

    If ``exception_data`` is truthy, its repr is printed to stdout
    before exiting.
    """
    print(message, file=sys.stderr)
    if exception_data:
        print(repr(exception_data))
    sys.exit(1)
Print a failure message and exit nonzero
def get_installed_distributions(local_only=True, skip=stdlib_pkgs, include_editables=True, editables_only=False, user_only=False): # type: (bool, Container[str], bool, bool, bool) -> List[...
Return a list of installed Distribution objects. If ``local_only`` is True (default), only return installations local to the current virtualenv, if in a virtualenv. ``skip`` argument is an iterable of lower-case project names to ignore; defaults to stdlib_pkgs If ``include_editables`` is False, d...
def _fill_empty_sessions(self, fill_subjects, fill_visits): """ Fill in tree with additional empty subjects and/or visits to allow the study to pull its inputs from external repositories """ if fill_subjects is None: fill_subjects = [s.id for s in self.subjects] ...
Fill in tree with additional empty subjects and/or visits to allow the study to pull its inputs from external repositories
def _build_response(self): """ Builds the composite reponse to be output by the module by looping through all events and formatting the necessary strings. Returns: A composite containing the individual response for each event. """ responses = [] self.event_urls =...
Builds the composite response to be output by the module by looping through all events and formatting the necessary strings. Returns: A composite containing the individual response for each event.
def _source_info(): """ Get information from the user's code (two frames up) to leave breadcrumbs for file, line, class and function. """ ofi = inspect.getouterframes(inspect.currentframe())[2] try: calling_class = ofi[0].f_locals['self'].__class__ except KeyError: calling_cl...
Get information from the user's code (two frames up) to leave breadcrumbs for file, line, class and function.
def awd_lstm_lm_1150(dataset_name=None, vocab=None, pretrained=False, ctx=cpu(), root=os.path.join(get_home_dir(), 'models'), **kwargs): r"""3-layer LSTM language model with weight-drop, variational dropout, and tied weights. Embedding size is 400, and hidden layer size is 1150. Param...
r"""3-layer LSTM language model with weight-drop, variational dropout, and tied weights. Embedding size is 400, and hidden layer size is 1150. Parameters ---------- dataset_name : str or None, default None The dataset name on which the pre-trained model is trained. Options are 'wikitex...
def workflow_script_reject(self): """Copy real analyses to RejectAnalysis, with link to real create a new worksheet, with the original analyses, and new duplicates and references to match the rejected worksheet. """ if skip(self, "reject"): return ...
Copy real analyses to RejectAnalysis, with link to real create a new worksheet, with the original analyses, and new duplicates and references to match the rejected worksheet.
def multiglob_compile(globs, prefix=False): """Generate a single "A or B or C" regex from a list of shell globs. :param globs: Patterns to be processed by :mod:`fnmatch`. :type globs: iterable of :class:`~__builtins__.str` :param prefix: If ``True``, then :meth:`~re.RegexObject.match` will per...
Generate a single "A or B or C" regex from a list of shell globs. :param globs: Patterns to be processed by :mod:`fnmatch`. :type globs: iterable of :class:`~__builtins__.str` :param prefix: If ``True``, then :meth:`~re.RegexObject.match` will perform prefix matching rather than exact string match...
def _compute_mean(self, C, mag, r): """ Compute mean value according to equation 30, page 1021. """ mean = (C['c1'] + self._compute_term1(C, mag) + self._compute_term2(C, mag, r)) return mean
Compute mean value according to equation 30, page 1021.
def updateUserTone(conversationPayload, toneAnalyzerPayload, maintainHistory): """ updateUserTone processes the Tone Analyzer payload to pull out the emotion, writing and social tones, and identify the meaningful tones (i.e., those tones that meet the specified thresholds). The conversationPayload j...
updateUserTone processes the Tone Analyzer payload to pull out the emotion, writing and social tones, and identify the meaningful tones (i.e., those tones that meet the specified thresholds). The conversationPayload json object is updated to include these tones. @param conversationPayload json object re...
def set_rich_menu_image(self, rich_menu_id, content_type, content, timeout=None): """Call upload rich menu image API. https://developers.line.me/en/docs/messaging-api/reference/#upload-rich-menu-image Uploads and attaches an image to a rich menu. :param str rich_menu_id: IDs of the ri...
Call upload rich menu image API. https://developers.line.me/en/docs/messaging-api/reference/#upload-rich-menu-image Uploads and attaches an image to a rich menu. :param str rich_menu_id: IDs of the richmenu :param str content_type: image/jpeg or image/png :param content: image...
def setRelay(self, seconds, relay, status, password="00000000"): """Serial call to set relay. Args: seconds (int): Seconds to hold, ero is hold forever. See :class:`~ekmmeters.RelayInterval`. relay (int): Selected relay, see :class:`~ekmmeters.Relay`. status (int): S...
Serial call to set relay. Args: seconds (int): Seconds to hold; zero means hold forever. See :class:`~ekmmeters.RelayInterval`. relay (int): Selected relay, see :class:`~ekmmeters.Relay`. status (int): Status to set, see :class:`~ekmmeters.RelayState` password (str):...
def resolve_remote(self, uri): """Resolve a uri or relative path to a schema.""" try: return super(LocalRefResolver, self).resolve_remote(uri) except ValueError: return super(LocalRefResolver, self).resolve_remote( 'file://' + get_schema_path(uri.rsplit('....
Resolve a uri or relative path to a schema.
def _upload(param_dict, timeout, data): """ Calls upload either with a local audio file, or a url. Returns a track object. """ param_dict['format'] = 'json' param_dict['wait'] = 'true' param_dict['bucket'] = 'audio_summary' result = util.callm('track/upload', param_dict, POST = True, soc...
Calls upload either with a local audio file, or a url. Returns a track object.
def blame_incremental(self, rev, file, **kwargs): """Iterator for blame information for the given file at the given revision. Unlike .blame(), this does not return the actual file's contents, only a stream of BlameEntry tuples. :param rev: revision specifier, see git-rev-parse for viab...
Iterator for blame information for the given file at the given revision. Unlike .blame(), this does not return the actual file's contents, only a stream of BlameEntry tuples. :param rev: revision specifier, see git-rev-parse for viable options. :return: lazy iterator of BlameEntry tupl...
def gen_image(img, width, height, outfile, img_type='grey'): """Save an image with the given parameters.""" assert len(img) == width * height or len(img) == width * height * 3 if img_type == 'grey': misc.imsave(outfile, img.reshape(width, height)) elif img_type == 'color': misc.imsave(...
Save an image with the given parameters.
def sign(self, storepass=None, keypass=None, keystore=None, apk=None, alias=None, name='app'): """ Signs (jarsign and zipalign) a target apk file based on keystore information, uses default debug keystore file by default. :param storepass(str): keystore file storepass :param keypass(str): keystore file...
Signs (jarsign and zipalign) a target apk file based on keystore information, uses default debug keystore file by default. :param storepass(str): keystore file storepass :param keypass(str): keystore file keypass :param keystore(str): keystore file path :param apk(str): apk file path to be signed :...
def validate(tool_class, model_class): """ Does basic ObjectTool option validation. """ if not hasattr(tool_class, 'name'): raise ImproperlyConfigured("No 'name' attribute found for tool %s." % ( tool_class.__name__ )) if not hasattr(tool_class, 'label'): raise I...
Does basic ObjectTool option validation.
def run(self): """Run directive.""" try: language = self.arguments[0] except IndexError: language = '' code = '\n'.join(self.content) literal = docutils.nodes.literal_block(code, code) literal['classes'].append('code-block') literal['langua...
Run directive.
def _setup_metric_group_definitions(self): """ Return the dict of MetricGroupDefinition objects for this metrics context, by processing its 'metric-group-infos' property. """ # Dictionary of MetricGroupDefinition objects, by metric group name metric_group_definitions = di...
Return the dict of MetricGroupDefinition objects for this metrics context, by processing its 'metric-group-infos' property.
def register(device, data, facet): """ Register a U2F device data = { "version": "U2F_V2", "challenge": string, //b64 encoded challenge "appId": string, //app_id } """ if isinstance(data, string_types): data = json.loads(data) if data['version'] != VERSION...
Register a U2F device data = { "version": "U2F_V2", "challenge": string, //b64 encoded challenge "appId": string, //app_id }
def convert_uen(pinyin):
    """Convert uen: restore the original full finals.

    When preceded by an initial, the finals iou/uei/uen are written
    iu/ui/un — e.g. niu, gui, lun; expand them back.
    """
    def _expand(match):
        return match.group(1) + UN_MAP[match.group(2)]

    return UN_RE.sub(_expand, pinyin)
uen 转换,还原原始的韵母 iou,uei,uen前面加声母的时候,写成iu,ui,un。 例如niu(牛),gui(归),lun(论)。
def _buffer_decode(self, input, errors, final): """ Decode bytes that may be arriving in a stream, following the Codecs API. `input` is the incoming sequence of bytes. `errors` tells us how to handle errors, though we delegate all error-handling cases to the real UTF-8 d...
Decode bytes that may be arriving in a stream, following the Codecs API. `input` is the incoming sequence of bytes. `errors` tells us how to handle errors, though we delegate all error-handling cases to the real UTF-8 decoder to ensure correct behavior. `final` indicates whether ...
def staticproperty(func): """Use as a decorator on a method definition to make it a class-level attribute (without binding). This decorator can be applied to a method or a staticmethod. This decorator does not bind any arguments. Usage: >>> other_x = 'value' >>> class Foo(object): ... @staticproperty ...
Use as a decorator on a method definition to make it a class-level attribute (without binding). This decorator can be applied to a method or a staticmethod. This decorator does not bind any arguments. Usage: >>> other_x = 'value' >>> class Foo(object): ... @staticproperty ... def x(): ... retu...
def get(company='', company_uri=''): """Performs a HTTP GET for a glassdoor page and returns json""" if not company and not company_uri: raise Exception("glassdoor.gd.get(company='', company_uri=''): "\ " company or company_uri required") payload = {} if not company_u...
Performs a HTTP GET for a glassdoor page and returns json
def from_df(cls, ratings:DataFrame, valid_pct:float=0.2, user_name:Optional[str]=None, item_name:Optional[str]=None, rating_name:Optional[str]=None, test:DataFrame=None, seed:int=None, path:PathOrStr='.', bs:int=64, val_bs:int=None, num_workers:int=defaults.cpus, dl_tfms:Optional[Collec...
Create a `DataBunch` suitable for collaborative filtering from `ratings`.
def query_subdevice2index(self, ncfile) -> Subdevice2Index: """Return a |Subdevice2Index| that maps the (sub)device names to their position within the given NetCDF file. Method |NetCDFVariableBase.query_subdevice2index| is based on |NetCDFVariableBase.query_subdevices|. The returned ...
Return a |Subdevice2Index| that maps the (sub)device names to their position within the given NetCDF file. Method |NetCDFVariableBase.query_subdevice2index| is based on |NetCDFVariableBase.query_subdevices|. The returned |Subdevice2Index| object remembers the NetCDF file the (s...
def set_levels(self, levels): """ Replace the levels of a categorical column. New levels must be aligned with the old domain. This call has copy-on-write semantics. :param List[str] levels: A list of strings specifying the new levels. The number of new levels must match the...
Replace the levels of a categorical column. New levels must be aligned with the old domain. This call has copy-on-write semantics. :param List[str] levels: A list of strings specifying the new levels. The number of new levels must match the number of old levels. :returns: A single-...
def _to_power_basis_degree8(nodes1, nodes2): r"""Compute the coefficients of an **intersection polynomial**. Helper for :func:`to_power_basis` in the case that B |eacute| zout's `theorem`_ tells us the **intersection polynomial** is degree :math:`8`. This happens if the two curves have degrees one and ...
r"""Compute the coefficients of an **intersection polynomial**. Helper for :func:`to_power_basis` in the case that B |eacute| zout's `theorem`_ tells us the **intersection polynomial** is degree :math:`8`. This happens if the two curves have degrees one and eight or have degrees two and four. .. n...
def _raise_error_routes(iface, option, expected):
    '''
    Build a formatted route-error message, log it, and raise it
    as an AttributeError.
    '''
    message = _error_msg_routes(iface, option, expected)
    log.error(message)
    raise AttributeError(message)
Log and raise an error with a logical formatted message.
def convert_coordinates(coords, origin, wgs84, wrapped): """ Convert coordinates from one crs to another """ if isinstance(coords, list) or isinstance(coords, tuple): try: if isinstance(coords[0], list) or isinstance(coords[0], tuple): return [convert_coordinates(list(c), ori...
Convert coordinates from one crs to another
def stringify_summary(summary): """ stringify summary, in order to dump json file and generate html report. """ for index, suite_summary in enumerate(summary["details"]): if not suite_summary.get("name"): suite_summary["name"] = "testcase {}".format(index) for record in suite_s...
stringify summary, in order to dump json file and generate html report.
def __rst2graph(self, rs3_xml_tree): """ Reads an RST tree (from an ElementTree representation of an RS3 XML file) and adds all segments (nodes representing text) and groups (nonterminal nodes in an RST tree) as well as the relationships that hold between them (typed edges) to th...
Reads an RST tree (from an ElementTree representation of an RS3 XML file) and adds all segments (nodes representing text) and groups (nonterminal nodes in an RST tree) as well as the relationships that hold between them (typed edges) to this RSTGraph. Parameters --------...
def _merge_meta(self, encoded_meta, meta): """ Merge new meta dict into encoded meta. Returns new encoded meta. """ new_meta = None if meta: _meta = self._decode_meta(encoded_meta) for key, value in six.iteritems(meta): if value is None: ...
Merge new meta dict into encoded meta. Returns new encoded meta.
def modify(self, modification, parameters):
    """
    Apply ``modification`` with ``parameters`` to every underlying
    point source, using the same parameters for all of them.
    """
    for source in self:
        source.modify(modification, parameters)
Apply a modification to the underlying point sources, with the same parameters for all sources
def insert(self, table, columns, values, execute=True): """Insert a single row into a table.""" # TODO: Cant accept lists? # Concatenate statement cols, vals = get_col_val_str(columns) statement = "INSERT INTO {0} ({1}) VALUES ({2})".format(wrap(table), cols, vals) # Exe...
Insert a single row into a table.
def get_random_user(self):
    """
    Pick one user at random from the provider.

    :returns: Dictionary with ``username``, ``password`` and ``fullname``
    """
    from provider.models import User
    user = User.objects.order_by('?')[0]
    return {
        "username": user.username,
        "password": user.password,
        "fullname": user.fullname,
    }
Gets a random user from the provider :returns: Dictionary
def pre_save(self, model_instance, add): """Updates username created on ADD only.""" value = super(UserField, self).pre_save(model_instance, add) if not value and not add: # fall back to OS user if not accessing through browser # better than nothing ... value ...
Updates username created on ADD only.
def ci(data, statfunction=None, alpha=0.05, n_samples=10000, method='bca', output='lowhigh', epsilon=0.001, multi=None, _iter=True): """ Given a set of data ``data``, and a statistics function ``statfunction`` that applies to that data, computes the bootstrap confidence interval for ``statfunction`` o...
Given a set of data ``data``, and a statistics function ``statfunction`` that applies to that data, computes the bootstrap confidence interval for ``statfunction`` on that data. Data points are assumed to be delineated by axis 0. Parameters ---------- data: array_like, shape (N, ...) OR tuple of array_like all with sh...
def show(self):
    """
    Pretty-print each instruction with its index, byte offset,
    opcode value, name and formatted output.
    """
    offset = 0
    for num, ins in enumerate(self.get_instructions()):
        line = "{:8d} (0x{:08x}) {:04x} {:30} {}".format(
            num, offset, ins.get_op_value(), ins.get_name(),
            ins.get_output(self.idx))
        print(line)
        offset += ins.get_length()
Display (with a pretty print) this object
def get_worksheet(self, index): """Returns a worksheet with specified `index`. :param index: An index of a worksheet. Indexes start from zero. :type index: int :returns: an instance of :class:`gsperad.models.Worksheet` or `None` if the worksheet is not found. ...
Returns a worksheet with specified `index`. :param index: An index of a worksheet. Indexes start from zero. :type index: int :returns: an instance of :class:`gsperad.models.Worksheet` or `None` if the worksheet is not found. Example. To get first worksheet of a sprea...
def as_params(self): """ Returns the filters, orders, select, expands and search as query parameters :rtype: dict """ params = {} if self.has_filters: params['$filter'] = self.get_filters() if self.has_order: params['$orderby'] = self.get_order() ...
Returns the filters, orders, select, expands and search as query parameters :rtype: dict
def convert_sed_cols(tab): """Cast SED column names to lowercase.""" # Update Column names for colname in list(tab.columns.keys()): newname = colname.lower() newname = newname.replace('dfde', 'dnde') if tab.columns[colname].name == newname: continue tab.columns...
Cast SED column names to lowercase.
def add(self, search): """ Adds a new :class:`~elasticsearch_dsl.Search` object to the request:: ms = MultiSearch(index='my-index') ms = ms.add(Search(doc_type=Category).filter('term', category='python')) ms = ms.add(Search(doc_type=Blog)) """ ms = se...
Adds a new :class:`~elasticsearch_dsl.Search` object to the request:: ms = MultiSearch(index='my-index') ms = ms.add(Search(doc_type=Category).filter('term', category='python')) ms = ms.add(Search(doc_type=Blog))
def _get_goid2dbids(associations):
    """Map each GO ID to the set of DB IDs annotated with it."""
    goid_to_dbids = cx.defaultdict(set)
    for assoc in associations:
        goid_to_dbids[assoc.GO_ID].add(assoc.DB_ID)
    return dict(goid_to_dbids)
Return gene2go data for user-specified taxids.
def _close_prepared_statement(self): """ Close the prepared statement on the server. """ self.prepared_sql = None self.flush_to_query_ready() self.connection.write(messages.Close('prepared_statement', self.prepared_name)) self.connection.write(messages.Flush()) ...
Close the prepared statement on the server.
def delete_managed_disk(call=None, kwargs=None): # pylint: disable=unused-argument ''' Delete a managed disk from a resource group. ''' compconn = get_conn(client_type='compute') try: compconn.disks.delete(kwargs['resource_group'], kwargs['blob']) except Exception as exc: log....
Delete a managed disk from a resource group.
def distVersion(): """ The distribution version identifying a published release on PyPI. """ from pkg_resources import parse_version build_number = buildNumber() parsedBaseVersion = parse_version(baseVersion) if isinstance(parsedBaseVersion, tuple): raise RuntimeError("Setuptools ver...
The distribution version identifying a published release on PyPI.
def region_size(im): r""" Replace each voxel with size of region to which it belongs Parameters ---------- im : ND-array Either a boolean image wtih ``True`` indicating the features of interest, in which case ``scipy.ndimage.label`` will be applied to find regions, or a grey...
r""" Replace each voxel with size of region to which it belongs Parameters ---------- im : ND-array Either a boolean image wtih ``True`` indicating the features of interest, in which case ``scipy.ndimage.label`` will be applied to find regions, or a greyscale image with integer ...
def normalize_curves_eb(curves): """ A more sophisticated version of normalize_curves, used in the event based calculator. :param curves: a list of pairs (losses, poes) :returns: first losses, all_poes """ # we assume non-decreasing losses, so losses[-1] is the maximum loss non_zero_cur...
A more sophisticated version of normalize_curves, used in the event based calculator. :param curves: a list of pairs (losses, poes) :returns: first losses, all_poes
def remote_mgmt_addr_uneq_store(self, remote_mgmt_addr):
    """Store the remote MGMT address if it differs from the saved one.

    Returns True when the stored value was updated, False otherwise.
    """
    if remote_mgmt_addr == self.remote_mgmt_addr:
        return False
    self.remote_mgmt_addr = remote_mgmt_addr
    return True
This function saves the MGMT address, if different from stored.
def run(*steps): """ Helper to run one or more async functions synchronously, with graceful handling of SIGINT / Ctrl-C. Returns the return value of the last function. """ if not steps: return task = None run._sigint = False # function attr to allow setting from closure lo...
Helper to run one or more async functions synchronously, with graceful handling of SIGINT / Ctrl-C. Returns the return value of the last function.
def NewOutputModule(cls, name, output_mediator): """Creates a new output module object for the specified output format. Args: name (str): name of the output module. output_mediator (OutputMediator): output mediator. Returns: OutputModule: output module. Raises: KeyError: if th...
Creates a new output module object for the specified output format. Args: name (str): name of the output module. output_mediator (OutputMediator): output mediator. Returns: OutputModule: output module. Raises: KeyError: if there is no output class found with the supplied name. ...
def meth_set_acl(args): """ Assign an ACL role to a list of users for a workflow. """ acl_updates = [{"user": user, "role": args.role} \ for user in set(expand_fc_groups(args.users)) \ if user != fapi.whoami()] id = args.snapshot_id if not id: # get the lat...
Assign an ACL role to a list of users for a workflow.
def p_example_multiline(self, p):
    """example_field : ID EQ NL INDENT ex_map NL DEDENT"""
    # NOTE: the docstring above is a PLY (yacc) grammar production, not
    # documentation -- the parser reads it at runtime; do not edit it.
    # Build an AST node for a multi-line example field: p[1] is the field
    # name (ID token), p[5] is the ex_map parsed inside the INDENT block;
    # lineno/lexpos of the ID token anchor the node's source position.
    p[0] = AstExampleField(
        self.path, p.lineno(1), p.lexpos(1), p[1], p[5])
example_field : ID EQ NL INDENT ex_map NL DEDENT
def _CSI(self, cmd): """ Control sequence introducer """ sys.stdout.write('\x1b[') sys.stdout.write(cmd)
Control sequence introducer
def get_args_parser(): """Return a parser for command line options.""" parser = argparse.ArgumentParser( description='Marabunta: Migrating ants for Odoo') parser.add_argument('--migration-file', '-f', action=EnvDefault, envvar='MARABUNTA_MIGRATION_FILE...
Return a parser for command line options.
def find_near_matches_no_deletions_ngrams(subsequence, sequence, search_params): """search for near-matches of subsequence in sequence This searches for near-matches, where the nearly-matching parts of the sequence must meet the following limitations (relative to the subsequence): * the maximum allowe...
search for near-matches of subsequence in sequence This searches for near-matches, where the nearly-matching parts of the sequence must meet the following limitations (relative to the subsequence): * the maximum allowed number of character substitutions * the maximum allowed number of new characters i...
def taskGroupCreationRequested(self, *args, **kwargs): """ tc-gh requested the Queue service to create all the tasks in a group supposed to signal that taskCreate API has been called for every task in the task group for this particular repo and this particular organization curre...
tc-gh requested the Queue service to create all the tasks in a group supposed to signal that taskCreate API has been called for every task in the task group for this particular repo and this particular organization currently used for creating initial status indicators in GitHub UI using Statuse...
def pretty_print(self): """ Print the error message to stdout with colors and borders """ print colored.blue("-" * 40) print colored.red("datacats: problem was encountered:") print self.message print colored.blue("-" * 40)
Print the error message to stdout with colors and borders
def unix_time(self, dt): """Returns the number of seconds since the UNIX epoch for the given datetime (dt). PARAMETERS: dt -- datetime """ epoch = datetime.utcfromtimestamp(0) delta = dt - epoch return int(delta.total_seconds())
Returns the number of seconds since the UNIX epoch for the given datetime (dt). PARAMETERS: dt -- datetime
def ipv6_link_local(self, **kwargs): """Configure ipv6 link local address on interfaces on vdx switches Args: int_type: Interface type on which the ipv6 link local needs to be configured. name: 'Ve' or 'loopback' interface name. rbridge_id (str): rbridge...
Configure ipv6 link local address on interfaces on vdx switches Args: int_type: Interface type on which the ipv6 link local needs to be configured. name: 'Ve' or 'loopback' interface name. rbridge_id (str): rbridge-id for device. get (bool): Get conf...
def getIdent(self, node): """ Get the graph identifier for a node """ ident = self.getRawIdent(node) if ident is not None: return ident node = self.findNode(node) if node is None: return None return node.graphident
Get the graph identifier for a node
def bbox_to_resolution(bbox, width, height): """ Calculates pixel resolution in meters for a given bbox of a given width and height. :param bbox: bounding box :type bbox: geometry.BBox :param width: width of bounding box in pixels :type width: int :param height: height of bounding box in pixels...
Calculates pixel resolution in meters for a given bbox of a given width and height. :param bbox: bounding box :type bbox: geometry.BBox :param width: width of bounding box in pixels :type width: int :param height: height of bounding box in pixels :type height: int :return: resolution east-w...
def mark_offer_as_lose(self, offer_id): """ Mark offer as lose :param offer_id: the offer id :return Response """ return self._create_put_request( resource=OFFERS, billomat_id=offer_id, command=LOSE, )
Mark offer as lose :param offer_id: the offer id :return Response
def run(self): """ This defines the sequence of actions that are taken when the preemptive concurrency state is executed :return: """ logger.debug("Starting execution of {0}{1}".format(self, " (backwards)" if self.backward_execution else "")) self.setup_run() try: ...
This defines the sequence of actions that are taken when the preemptive concurrency state is executed :return:
def taskdir(self): """ Return the directory under which all artefacts are stored. """ return os.path.join(self.BASE, self.TAG, self.task_family)
Return the directory under which all artefacts are stored.
def get_required(self, name): """ Gets all required dependencies by their name. At least one dependency must be present. If no dependencies was found it throws a [[ReferenceException]] :param name: the dependency name to locate. :return: a list with found dependencies. ...
Gets all required dependencies by their name. At least one dependency must be present. If no dependencies was found it throws a [[ReferenceException]] :param name: the dependency name to locate. :return: a list with found dependencies.
def get_grouped_opcodes(self, n=3): """ Isolate change clusters by eliminating ranges with no changes. Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). >>> from pprint import pprint >>> a = map(str, range...
Isolate change clusters by eliminating ranges with no changes. Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). >>> from pprint import pprint >>> a = map(str, range(1,40)) >>> b = a[:] >>> b[8:8] ...
def get_managed( name, template, source, source_hash, source_hash_name, user, group, mode, attrs, saltenv, context, defaults, skip_verify=False, **kwargs): ''' Return the managed file data for file.ma...
Return the managed file data for file.managed name location where the file lives on the server template template format source managed source file source_hash hash of the source file source_hash_name When ``source_hash`` refers to a remote file, this spec...
def load(self, name): """ If not yet in the cache, load the named template and compiles it, placing it into the cache. If in cache, return the cached template. """ if self.reload: self._maybe_purge_cache() template = self.cache.get(name) if ...
If not yet in the cache, load the named template and compiles it, placing it into the cache. If in cache, return the cached template.
def rst2md(text): """Converts the RST text from the examples docstrigs and comments into markdown text for the IPython notebooks""" top_heading = re.compile(r'^=+$\s^([\w\s-]+)^=+$', flags=re.M) text = re.sub(top_heading, r'# \1', text) math_eq = re.compile(r'^\.\. math::((?:.+)?(?:\n+^ .+)*)', f...
Converts the RST text from the examples docstrigs and comments into markdown text for the IPython notebooks
def field2choices(self, field, **kwargs): """Return the dictionary of OpenAPI field attributes for valid choices definition :param Field field: A marshmallow field. :rtype: dict """ attributes = {} comparable = [ validator.comparable for validato...
Return the dictionary of OpenAPI field attributes for valid choices definition :param Field field: A marshmallow field. :rtype: dict
def md5sum(filename, blocksize=8192): """Get the MD5 checksum of a file.""" with open(filename, 'rb') as fh: m = hashlib.md5() while True: data = fh.read(blocksize) if not data: break m.update(data) retur...
Get the MD5 checksum of a file.
def init_default_config(self, path): ''' Initialize the config object and load the default configuration. The path to the config file must be provided. The name of the application is read from the config file. The config file stores the description and the default values for ...
Initialize the config object and load the default configuration. The path to the config file must be provided. The name of the application is read from the config file. The config file stores the description and the default values for all configurations including the appl...
def check(text): """Check the text.""" err = "airlinese.misc" msg = u"'{}' is airlinese." airlinese = [ "enplan(?:e|ed|ing|ement)", "deplan(?:e|ed|ing|ement)", "taking off momentarily", ] return existence_check(text, airlinese, err, msg)
Check the text.
def create_sqlite_backup_db(audit_tables): """ return an inspector object """ #we always want to create a whole new DB, so delete the old one first #if it exists. try: Popen("rm %s"%(config.get('sqlite', 'backup_url')), shell=True) logging.warn("Old sqlite backup DB removed")...
return an inspector object