code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def find_one(cls, pattern, string, flags=0): """JS-like match object. Use index number to get groups, if not match or no group, will return ''. Basic Usage:: >>> from torequests.utils import find_one >>> string = "abcd" >>> find_one("a.*", string) <toreq...
JS-like match object. Use index number to get groups, if not match or no group, will return ''. Basic Usage:: >>> from torequests.utils import find_one >>> string = "abcd" >>> find_one("a.*", string) <torequests.utils.RegMatch object at 0x0705F1D0> >...
def loadf(path, encoding=None, model=None, parser=None): """Deserialize path (.arpa, .gz) to a Python object.""" path = str(path) if path.endswith('.gz'): with gzip.open(path, mode='rt', encoding=encoding) as f: return load(f, model=model, parser=parser) else: with open(path,...
Deserialize path (.arpa, .gz) to a Python object.
def populate_obj(obj, attrs):
    """Populate an object's attributes from the provided mapping.

    :param obj: Target object whose attributes are set.
    :param attrs: Mapping of attribute name -> value.
    :return: ``None``; *obj* is mutated in place.
    """
    # dict.iteritems() exists only on Python 2 and raises AttributeError on
    # Python 3; items() is the portable spelling and works on both.
    for name, value in attrs.items():
        setattr(obj, name, value)
Populates an object's attributes using the provided dict
def merge_dict_of_lists(adict, indices, pop_later=True, copy=True): """Extend the within a dict of lists. The indices will indicate which list have to be extended by which other list. Parameters ---------- adict: OrderedDict An ordered dictionary of lists indices: list or tuple of 2 it...
Extend the lists within a dict of lists. The indices will indicate which lists have to be extended by which other lists. Parameters ---------- adict: OrderedDict An ordered dictionary of lists indices: list or tuple of 2 iterables of int, both having the same length The indices of the lists...
def conditions(self) -> Dict[str, Dict[str, Union[float, numpy.ndarray]]]: """Nested dictionary containing the values of all condition sequences. See the documentation on property |HydPy.conditions| for further information. """ conditions = {} for subname in NAME...
Nested dictionary containing the values of all condition sequences. See the documentation on property |HydPy.conditions| for further information.
def update(self): """ Fetch and parse stats """ self.frontends = [] self.backends = [] self.listeners = [] csv = [ l for l in self._fetch().strip(' #').split('\n') if l ] if self.failed: return #read fields header to create keys self.fields =...
Fetch and parse stats
def check_elasticsearch(record, *args, **kwargs): """Return permission that check if the record exists in ES index. :params record: A record object. :returns: A object instance with a ``can()`` method. """ def can(self): """Try to search for given record.""" search = request._method...
Return permission that checks if the record exists in ES index. :param record: A record object. :returns: An object instance with a ``can()`` method.
def cat(tensors, dim=0):
    """
    Efficient version of torch.cat that avoids a copy if there is only
    a single element in a list
    """
    assert isinstance(tensors, (list, tuple))
    # A one-element sequence needs no concatenation; hand it back untouched
    # so no copy is made.
    return tensors[0] if len(tensors) == 1 else torch.cat(tensors, dim)
Efficient version of torch.cat that avoids a copy if there is only a single element in a list
async def callproc(self, procname, args=()): """Execute stored procedure procname with args Compatibility warning: PEP-249 specifies that any modified parameters must be returned. This is currently impossible as they are only available by storing them in a server variable and th...
Execute stored procedure procname with args Compatibility warning: PEP-249 specifies that any modified parameters must be returned. This is currently impossible as they are only available by storing them in a server variable and then retrieved by a query. Since stored procedures...
def ordinal(self, num): """ Return the ordinal of num. num can be an integer or text e.g. ordinal(1) returns '1st' ordinal('one') returns 'first' """ if re.match(r"\d", str(num)): try: num % 2 n = num exce...
Return the ordinal of num. num can be an integer or text e.g. ordinal(1) returns '1st' ordinal('one') returns 'first'
def boolean_difference(self, mesh, inplace=False): """ Combines two meshes and retains only the volume in common between the meshes. Parameters ---------- mesh : vtki.PolyData The mesh to perform a union against. inplace : bool, optional ...
Combines two meshes and retains only the volume in common between the meshes. Parameters ---------- mesh : vtki.PolyData The mesh to perform a union against. inplace : bool, optional Updates mesh in-place while returning nothing. Returns ...
def __load_project(path): ''' Load a docker-compose project from path :param path: :return: ''' file_path = __get_docker_file_path(path) if file_path is None: msg = 'Could not find docker-compose file at {0}'.format(path) return __standardize_result(False, ...
Load a docker-compose project from path :param path: :return:
def get_fields_from_job_name(self, job_name): """Analyze a Jenkins job name, producing a dictionary The produced dictionary will include information about the category and subcategory of the job name, and any extra information which could be useful. For each deployment of a Jen...
Analyze a Jenkins job name, producing a dictionary The produced dictionary will include information about the category and subcategory of the job name, and any extra information which could be useful. For each deployment of a Jenkins dashboard, an implementation of this functio...
def to_array(self): """ Serializes this PreCheckoutQuery to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(PreCheckoutQuery, self).to_array() array['id'] = u(self.id) # py2: type unicode, py3: type str arr...
Serializes this PreCheckoutQuery to a dictionary. :return: dictionary representation of this object. :rtype: dict
def expose(self, binder, interface, annotation=None): """Expose the child injector to the parent inject for a binding.""" private_module = self class Provider(object): def get(self): return private_module.private_injector.get_instance( interfac...
Expose the child injector to the parent inject for a binding.
def names_labels(self, do_print=False):
    """Return ``(field_names, field_labels)``; optionally print each pair."""
    names, labels = self.field_names, self.field_labels
    if do_print:
        for pair in zip(names, labels):
            print('%s --> %s' % (str(pair[0]), str(pair[1])))
    return names, labels
Simple helper function to get all field names and labels
def inserir(self, id_equipment, id_script): """Inserts a new Related Equipment with Script and returns its identifier :param id_equipment: Identifier of the Equipment. Integer value and greater than zero. :param id_script: Identifier of the Script. Integer value and greater than zero. ...
Inserts a new Related Equipment with Script and returns its identifier :param id_equipment: Identifier of the Equipment. Integer value and greater than zero. :param id_script: Identifier of the Script. Integer value and greater than zero. :return: Dictionary with the following structure: ...
def write(self, fileobj = sys.stdout, indent = u""):
    """
    Recursively write an element and its children to a file.
    """
    # NOTE(review): only the start tag and a newline are emitted here; the
    # recursion over children implied by the docstring is not visible in this
    # snippet -- confirm against the full class definition.
    fileobj.write(self.start_tag(indent))
    fileobj.write(u"\n")
Recursively write an element and it's children to a file.
def load_plugins(self, plugin_class_name): """ load all available plugins :param plugin_class_name: str, name of plugin class (e.g. 'PreBuildPlugin') :return: dict, bindings for plugins of the plugin_class_name class """ # imp.findmodule('atomic_reactor') doesn't work ...
load all available plugins :param plugin_class_name: str, name of plugin class (e.g. 'PreBuildPlugin') :return: dict, bindings for plugins of the plugin_class_name class
def OxmlElement(nsptag_str, attrs=None, nsdecls=None): """ Return a 'loose' lxml element having the tag specified by *nsptag_str*. *nsptag_str* must contain the standard namespace prefix, e.g. 'a:tbl'. The resulting element is an instance of the custom element class for this tag name if one is defin...
Return a 'loose' lxml element having the tag specified by *nsptag_str*. *nsptag_str* must contain the standard namespace prefix, e.g. 'a:tbl'. The resulting element is an instance of the custom element class for this tag name if one is defined. A dictionary of attribute values may be provided as *attrs*...
def get_prefixed_config(self, section, option, **kwargs):
    """Look up *option* prefixed with this workflow's type, using the
    unprefixed value of *option* as the default.
    """
    config = Config.instance()
    fallback = config.get_expanded(section, option, **kwargs)
    prefixed_option = "{}_{}".format(self.workflow_type, option)
    return config.get_expanded(section, prefixed_option, default=fallback, **kwargs)
TODO.
def relpath(path, start=None): """ Return a relative file path to path either from the current directory or from an optional start directory. For storage objects, "path" and "start" are relative to storage root. "/" are not stripped on storage objects path. The ending slash is required on ...
Return a relative file path to path either from the current directory or from an optional start directory. For storage objects, "path" and "start" are relative to storage root. "/" are not stripped on storage objects path. The ending slash is required on some storage to signify that target is a di...
def exec_after_request_actions(actions, response, **kwargs): """Executes actions of the "after" and "after_METHOD" groups. A "response" var will be injected in the current context. """ current_context["response"] = response groups = ("after_" + flask.request.method.lower(), "after") try: ...
Executes actions of the "after" and "after_METHOD" groups. A "response" var will be injected in the current context.
def updateShape(self): """Call after h.define_shape() to update cell coords""" x = self.tags['x'] y = -self.tags['y'] # Neuron y-axis positive = upwards, so assume pia=0 and cortical depth = neg z = self.tags['z'] for sec in list(self.secs.values()): ...
Call after h.define_shape() to update cell coords
def check_sim_in(self): '''check for FDM packets from runsim''' try: pkt = self.sim_in.recv(17*8 + 4) except socket.error as e: if not e.errno in [ errno.EAGAIN, errno.EWOULDBLOCK ]: raise return if len(pkt) != 17*8 + 4: # w...
check for FDM packets from runsim
def format_crypto_units(input_quantity, input_type, output_type, coin_symbol=None, print_cs=False, safe_trimming=False, round_digits=0): ''' Take an input like 11002343 satoshis and convert it to another unit (e.g. BTC) and format it with appropriate units if coin_symbol is supplied and print_cs == True th...
Take an input like 11002343 satoshis and convert it to another unit (e.g. BTC) and format it with appropriate units if coin_symbol is supplied and print_cs == True then the units will be added (e.g. BTC or satoshis) Smart trimming gets rid of trailing 0s in the decimal place, except for satoshis (irrelevant) ...
def get_arsc_info(arscobj): """ Return a string containing all resources packages ordered by packagename, locale and type. :param arscobj: :class:`~ARSCParser` :return: a string """ buff = "" for package in arscobj.get_packages_names(): buff += package + ":\n" for locale in ...
Return a string containing all resources packages ordered by packagename, locale and type. :param arscobj: :class:`~ARSCParser` :return: a string
def parse_iparamvalue(self, tup_tree): """ Parse expected IPARAMVALUE element. I.e. :: <!ELEMENT IPARAMVALUE (VALUE | VALUE.ARRAY | VALUE.REFERENCE | INSTANCENAME | CLASSNAME | QUALIFIER.DECLARATION | ...
Parse expected IPARAMVALUE element. I.e. :: <!ELEMENT IPARAMVALUE (VALUE | VALUE.ARRAY | VALUE.REFERENCE | INSTANCENAME | CLASSNAME | QUALIFIER.DECLARATION | CLASS | INSTANCE | VALUE.NAMEDINSTAN...
def summary(self):
    """Return the cached CallSummaryList, creating it on first access.

    :rtype: twilio.rest.insights.v1.summary.CallSummaryList
    """
    cached = self._summary
    if cached is None:
        cached = CallSummaryList(self)
        self._summary = cached
    return cached
:rtype: twilio.rest.insights.v1.summary.CallSummaryList
def set_feature_generator(self): """Generates proteins with quant from the lookup table""" self.features = preparation.build_proteintable(self.lookup, self.headerfields, self.mergecutoff, ...
Generates proteins with quant from the lookup table
def _estimate_transforms(self, nsamples): """ Uses block matrix inversion identities to quickly estimate transforms. After a bit of matrix math we can isolate a transform matrix (# features x # features) that is independent of any sample we are explaining. It is the result of averaging over ...
Uses block matrix inversion identities to quickly estimate transforms. After a bit of matrix math we can isolate a transform matrix (# features x # features) that is independent of any sample we are explaining. It is the result of averaging over all feature permutations, but we just use a fixed...
def join(self, other): """ Join two headings into a new one. It assumes that self and other are headings that share no common dependent attributes. """ return Heading( [self.attributes[name].todict() for name in self.primary_key] + [other.attributes[name]....
Join two headings into a new one. It assumes that self and other are headings that share no common dependent attributes.
def pixy_set_brightness(self, brightness): """ Sends the setBrightness Pixy command. This method sets the brightness (exposure) of Pixy's camera. :param brightness: range between 0 and 255 with 255 being the brightest setting :returns: No return value...
Sends the setBrightness Pixy command. This method sets the brightness (exposure) of Pixy's camera. :param brightness: range between 0 and 255 with 255 being the brightest setting :returns: No return value.
def _regex_from_encoded_pattern(s): """'foo' -> re.compile(re.escape('foo')) '/foo/' -> re.compile('foo') '/foo/i' -> re.compile('foo', re.I) """ if s.startswith('/') and s.rfind('/') != 0: # Parse it: /PATTERN/FLAGS idx = s.rfind('/') pattern, flags_str = s[1:idx],...
'foo' -> re.compile(re.escape('foo')) '/foo/' -> re.compile('foo') '/foo/i' -> re.compile('foo', re.I)
def CountFlowResultsByType(self, client_id, flow_id): """Returns counts of flow results grouped by result type.""" result = collections.Counter() for hr in self.ReadFlowResults(client_id, flow_id, 0, sys.maxsize): key = compatibility.GetName(hr.payload.__class__) result[key] += 1 return res...
Returns counts of flow results grouped by result type.
def BVirial_Abbott(T, Tc, Pc, omega, order=0): r'''Calculates the second virial coefficient using the model in [1]_. Simple fit to the Lee-Kesler equation. .. math:: B_r=B^{(0)}+\omega B^{(1)} B^{(0)}=0.083+\frac{0.422}{T_r^{1.6}} B^{(1)}=0.139-\frac{0.172}{T_r^{4.2}} Paramet...
r'''Calculates the second virial coefficient using the model in [1]_. Simple fit to the Lee-Kesler equation. .. math:: B_r=B^{(0)}+\omega B^{(1)} B^{(0)}=0.083+\frac{0.422}{T_r^{1.6}} B^{(1)}=0.139-\frac{0.172}{T_r^{4.2}} Parameters ---------- T : float Temperatur...
def convert_ram_sp_rf(ADDR_WIDTH=8, DATA_WIDTH=8):
    ''' Convert RAM: Single-Port, Read-First '''
    # Build the port signals the generator expects, then convert to Verilog.
    clk = Signal(bool(0))
    we = Signal(bool(0))
    addr = Signal(intbv(0)[ADDR_WIDTH:])
    data_in = Signal(intbv(0)[DATA_WIDTH:])
    data_out = Signal(intbv(0)[DATA_WIDTH:])
    toVerilog(ram_sp_rf, clk, we, addr, data_in, data_out)
Convert RAM: Single-Port, Read-First
def get_icon_url(self, icon): """ Replaces the "icon name" with a full usable URL. * When the icon is an absolute URL, it is used as-is. * When the icon contains a slash, it is relative from the ``STATIC_URL``. * Otherwise, it's relative to the theme url folder. """ ...
Replaces the "icon name" with a full usable URL. * When the icon is an absolute URL, it is used as-is. * When the icon contains a slash, it is relative from the ``STATIC_URL``. * Otherwise, it's relative to the theme url folder.
def average_s_rad(site, hypocenter, reference, pp, normal, dist_to_plane, e, p0, p1, delta_slip): """ Gets the average S-wave radiation pattern given an e-path as described in: Spudich et al. (2013) "Final report of the NGA-West2 directivity working group", PEER report, page 90- 92 and...
Gets the average S-wave radiation pattern given an e-path as described in: Spudich et al. (2013) "Final report of the NGA-West2 directivity working group", PEER report, page 90- 92 and computes: the site to the direct point distance, rd, and the hypocentral distance, r_hyp. :param site: :class:...
def create_time_series(self, label_values, func): """Create a derived measurement to trac `func`. :type label_values: list(:class:`LabelValue`) :param label_values: The measurement's label values. :type func: function :param func: The function to track. :rtype: :class:...
Create a derived measurement to track `func`. :type label_values: list(:class:`LabelValue`) :param label_values: The measurement's label values. :type func: function :param func: The function to track. :rtype: :class:`DerivedGaugePoint` :return: A read-only measurement ...
def visit_If(self, node): """ Handle iterate variable across branches >>> import gast as ast >>> from pythran import passmanager, backend >>> node = ast.parse(''' ... def foo(a): ... if a > 1: b = 1 ... else: b = 3''') >>> pm = passmanager.PassMa...
Handle iterate variable across branches >>> import gast as ast >>> from pythran import passmanager, backend >>> node = ast.parse(''' ... def foo(a): ... if a > 1: b = 1 ... else: b = 3''') >>> pm = passmanager.PassManager("test") >>> res = pm.gat...
def infer_struct(value: Mapping[str, GenericAny]) -> Struct:
    """Infer the :class:`~ibis.expr.datatypes.Struct` type of `value`."""
    if not value:
        raise TypeError('Empty struct type not supported')
    names = list(value.keys())
    field_types = [infer(v) for v in value.values()]
    return Struct(names, field_types)
Infer the :class:`~ibis.expr.datatypes.Struct` type of `value`.
def get_broadcast_date(pid): """Take BBC pid (string); extract and return broadcast date as string.""" print("Extracting first broadcast date...") broadcast_etree = open_listing_page(pid + '/broadcasts.inc') original_broadcast_date, = broadcast_etree.xpath( '(//div[@class="grid__inner"]//div' ...
Take BBC pid (string); extract and return broadcast date as string.
def hacking_no_locals(logical_line, tokens, noqa): """Do not use locals() or self.__dict__ for string formatting. Okay: 'locals()' Okay: 'locals' Okay: locals() Okay: print(locals()) H501: print("%(something)" % locals()) H501: LOG.info(_("%(something)") % self.__dict__) Okay: print("%(...
Do not use locals() or self.__dict__ for string formatting. Okay: 'locals()' Okay: 'locals' Okay: locals() Okay: print(locals()) H501: print("%(something)" % locals()) H501: LOG.info(_("%(something)") % self.__dict__) Okay: print("%(something)" % locals()) # noqa
def strip_html(text): """ Get rid of ugly twitter html """ def reply_to(text): replying_to = [] split_text = text.split() for index, token in enumerate(split_text): if token.startswith('@'): replying_to.append(token[1:]) else: message = split_text[...
Get rid of ugly twitter html
def set_layout(self, value): """Set the layout table value. Called on attribute put""" # Can't do this with changes_squashed as it will call update_modified # from another thread and deadlock. Need RLock.is_owned() from update_* part_info = self.run_hooks( LayoutHook(p, c, se...
Set the layout table value. Called on attribute put
async def do_after_sleep(delay: float, coro, *args, **kwargs): """ Performs an action after a set amount of time. This function only calls the coroutine after the delay, preventing asyncio complaints about destroyed coros. :param delay: Time in seconds :param coro: Coroutine to run :param ...
Performs an action after a set amount of time. This function only calls the coroutine after the delay, preventing asyncio complaints about destroyed coros. :param delay: Time in seconds :param coro: Coroutine to run :param args: Arguments to pass to coroutine :param kwargs: Keyword arguments t...
def check_request(name=None): ''' .. versionadded:: 2015.5.0 Return the state request information, if any CLI Example: .. code-block:: bash salt '*' state.check_request ''' notify_path = os.path.join(__opts__['cachedir'], 'req_state.p') serial = salt.payload.Serial(__opts__) ...
.. versionadded:: 2015.5.0 Return the state request information, if any CLI Example: .. code-block:: bash salt '*' state.check_request
def do_upload(post_data, callback=None): """ does the actual upload also sets and generates the user agent string """ encoder = MultipartEncoder(post_data) monitor = MultipartEncoderMonitor(encoder, callback) headers = {'User-Agent': USER_AGENT, 'Content-Type': monitor.content_type} respon...
does the actual upload also sets and generates the user agent string
def trim(stream, **kwargs): """Trim the input so that the output contains one continuous subpart of the input. Args: start: Specify the time of the start of the kept section, i.e. the frame with the timestamp start will be the first frame in the output. end: Specify the time of the ...
Trim the input so that the output contains one continuous subpart of the input. Args: start: Specify the time of the start of the kept section, i.e. the frame with the timestamp start will be the first frame in the output. end: Specify the time of the first frame that will be dropped, i...
def load(self, game_json=None, mode=None): """ Load a game from a serialized JSON representation. The game expects a well defined structure as follows (Note JSON string format): '{ "guesses_made": int, "key": "str:a 4 word", "status": "str: one of pla...
Load a game from a serialized JSON representation. The game expects a well defined structure as follows (Note JSON string format): '{ "guesses_made": int, "key": "str:a 4 word", "status": "str: one of playing, won, lost", "mode": { "digits...
def filter(self, value, model=None, context=None): """ Filter Performs value filtering and returns filtered result. :param value: input value :param model: parent model being validated :param context: object, filtering context :return:...
Filter Performs value filtering and returns filtered result. :param value: input value :param model: parent model being validated :param context: object, filtering context :return: filtered value
def alias_create(indices, alias, hosts=None, body=None, profile=None, source=None): ''' Create an alias for a specific index/indices indices Single or multiple indices separated by comma, use _all to perform the operation on all indices. alias Alias name body Optional defini...
Create an alias for a specific index/indices indices Single or multiple indices separated by comma, use _all to perform the operation on all indices. alias Alias name body Optional definition such as routing or filter as defined in https://www.elastic.co/guide/en/elasticsearch/refer...
def _getEngineRoot(self): """ Retrieves the user-specified engine root directory override (if set), or else performs auto-detection """ override = ConfigurationManager.getConfigKey('rootDirOverride') if override != None: Utility.printStderr('Using user-specified engine root: ' + override) return overrid...
Retrieves the user-specified engine root directory override (if set), or else performs auto-detection
def validate_string_dict(dct): """Validate that the input is a dict with string keys and values. Raises ValueError if not.""" for k,v in dct.iteritems(): if not isinstance(k, basestring): raise ValueError('key %r in dict must be a string' % k) if not isinstance(v, basestring): ...
Validate that the input is a dict with string keys and values. Raises ValueError if not.
def counter(metatdata, value):
    ''' Returns str(option_string * DropDown Value) e.g. -vvvvv '''
    # Non-numeric dropdown values produce no flag at all.
    if not str(value).isdigit():
        return None
    # Strip any dashes from the first command token, then repeat it.
    flag = str(metatdata['commands'][0]).replace('-', '')
    return '-' + flag * int(value)
Returns str(option_string * DropDown Value) e.g. -vvvvv
def imread(path, grayscale=False, size=None, interpolate="bilinear", channel_first=False, as_uint16=False, num_channels=-1): """ Read image by pypng module. Args: path (str or 'file object'): File path or object to read. grayscale (bool): size (tupple of int): ...
Read image by pypng module. Args: path (str or 'file object'): File path or object to read. grayscale (bool): size (tuple of int): (width, height). If None, output img shape depends on the files to read. channel_first (bool): This argument specif...
def to_task(self): """Return a task object representing this message.""" from google.appengine.api.taskqueue import Task task_args = self.get_task_args().copy() payload = None if 'payload' in task_args: payload = task_args.pop('payload') kwargs = { ...
Return a task object representing this message.
def isIn(val, schema, name = None):  # pylint: disable-msg=W0613
    """ !~~isIn(data)

    Return True when *val* is a member of the registered list *name*
    (defaulting to *schema*); False when the list is unknown or membership
    cannot be tested.
    """
    if name is None:
        name = schema
    # dict.has_key() was removed in Python 3; the `in` operator is the
    # portable spelling (and also works on Python 2).
    if name not in _lists:
        return False
    try:
        return val in _lists[name]
    except TypeError:
        # The stored value does not support membership tests (or val is
        # unhashable for a set-backed list); treat as "not in".
        return False
!~~isIn(data)
def from_flag(cls, flag): """ Return an Endpoint subclass instance based on the given flag. The instance that is returned depends on the endpoint name embedded in the flag. Flags should be of the form ``endpoint.{name}.extra...``, though for legacy purposes, the ``endpoint.`` p...
Return an Endpoint subclass instance based on the given flag. The instance that is returned depends on the endpoint name embedded in the flag. Flags should be of the form ``endpoint.{name}.extra...``, though for legacy purposes, the ``endpoint.`` prefix can be omitted. The ``{name}}`` ...
def _jgezerou16(ins):
    """ Jumps if top of the stack (16bit) is >= 0 to arg(1)
    Always TRUE for unsigned
    """
    operand = ins.quad[1]
    # Unsigned >= 0 is always true, so this becomes an unconditional jump;
    # non-constant operands are still evaluated (popped) for their effects.
    output = [] if is_int(operand) else _16bit_oper(operand)
    output.append('jp %s' % str(ins.quad[2]))
    return output
Jumps if top of the stack (16bit) is >= 0 to arg(1) Always TRUE for unsigned
def fromutc(self, dt): """ The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. :param dt: A :py:class:`datetime.datetime` object. :raises TypeError: Raised if ``dt`` is not a :py:class:`datetime.datetime` object. :raises ValueError: ...
The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. :param dt: A :py:class:`datetime.datetime` object. :raises TypeError: Raised if ``dt`` is not a :py:class:`datetime.datetime` object. :raises ValueError: Raised if this is called with a ``...
def get_all(self) -> List[Commodity]:
    """ Loads all non-currency commodities, assuming they are stocks. """
    ordered = self.query.order_by(Commodity.namespace, Commodity.mnemonic)
    return ordered.all()
Loads all non-currency commodities, assuming they are stocks.
def warn(self, message, container=None): """Present the warning `message` to the user, adding information on the location of the related element in the input file.""" if self.source is not None: message = '[{}] '.format(self.source.location) + message if container is not None...
Present the warning `message` to the user, adding information on the location of the related element in the input file.
def addReferences(self, reference, service_uids): """ Add reference analyses to reference """ # TODO Workflow - Analyses. Assignment of refanalysis to Instrument addedanalyses = [] wf = getToolByName(self, 'portal_workflow') bsc = getToolByName(self, 'bika_setup_catalog')...
Add reference analyses to reference
def block1(self):
    """
    Get the Block1 option.

    :return: the Block1 value, or None when the option is absent
    """
    value = None
    block1_number = defines.OptionRegistry.BLOCK1.number
    # No early break: when several BLOCK1 options are present, the last
    # matching one wins.
    for option in self.options:
        if option.number == block1_number:
            value = parse_blockwise(option.value)
    return value
Get the Block1 option. :return: the Block1 value
def fai_from_bam(ref_file, bam_file, out_file, data): """Create a fai index with only contigs in the input BAM file. """ contigs = set([x.contig for x in idxstats(bam_file, data)]) if not utils.file_uptodate(out_file, bam_file): with open(ref.fasta_idx(ref_file, data["config"])) as in_handle: ...
Create a fai index with only contigs in the input BAM file.
def _find_day_section_from_indices(indices, split_interval): """ Returns a list with [weekday, section] identifiers found using a list of indices. """ cells_day = 24 * 60 // split_interval rv = [[int(math.floor(i / cells_day)), i % cells_day] for i in indices] return rv
Returns a list with [weekday, section] identifiers found using a list of indices.
def at_depth(self, level):
    """ Locate the last config item at a specified depth """
    handle = lib.zconfig_at_depth(self._as_parameter_, level)
    return Zconfig(handle, False)
Locate the last config item at a specified depth
def create(self, req, parent, name, mode, fi):
    """Create and open a file.

    Valid replies:
        reply_create
        reply_err

    This default implementation reports "not implemented" (ENOSYS).
    """
    self.reply_err(req, errno.ENOSYS)
Create and open a file Valid replies: reply_create reply_err
def index_queryset(self, using=None): """ Index current language translation of published objects. TODO: Find a way to index all translations of the given model, not just the current site language's translation. """ translation.activate(settings.LANGUAGE_CODE) re...
Index current language translation of published objects. TODO: Find a way to index all translations of the given model, not just the current site language's translation.
def main(): """Function for command line execution""" parser = ArgumentParser(description="search files using n-grams") parser.add_argument('--path', dest='path', help="where to search", nargs=1, action="store", default=getcwd()) parser.add_argument('--update', dest='update', help="update the index", a...
Function for command line execution
def post(self, request): """ Save the provided data using the class' serializer. Args: request: The request being made. Returns: An ``APIResponse`` instance. If the request was successful the response will have a 200 status code and c...
Save the provided data using the class' serializer. Args: request: The request being made. Returns: An ``APIResponse`` instance. If the request was successful the response will have a 200 status code and contain the serializer's data. Oth...
def editprojecthook(self, project_id, hook_id, url, push=False, issues=False, merge_requests=False, tag_push=False): """ edit an existing hook from a project :param id_: project id :param hook_id: hook id :param url: the new url :return: True if success """ ...
edit an existing hook from a project :param id_: project id :param hook_id: hook id :param url: the new url :return: True if success
def _uptrace(nodelist, node): ''' ๋…ธ๋“œ๋ฅผ ์ƒํ–ฅ ์ถ”์ ํ•œ๋‹ค. ํ˜„ ๋…ธ๋“œ๋กœ๋ถ€ํ„ฐ ์กฐ์ƒ ๋…ธ๋“œ๋“ค์„ ์ฐจ๋ก€๋กœ ์ˆœํšŒํ•˜๋ฉฐ ๋ฐ˜ํ™˜ํ•œ๋‹ค. ๋ฃจํŠธ ๋…ธ๋“œ๋Š” ์ œ์™ธํ•œ๋‹ค. ''' if node.parent_index is None: return parent = nodelist[node.parent_index] for x in _uptrace(nodelist, parent): yield x yield node
๋…ธ๋“œ๋ฅผ ์ƒํ–ฅ ์ถ”์ ํ•œ๋‹ค. ํ˜„ ๋…ธ๋“œ๋กœ๋ถ€ํ„ฐ ์กฐ์ƒ ๋…ธ๋“œ๋“ค์„ ์ฐจ๋ก€๋กœ ์ˆœํšŒํ•˜๋ฉฐ ๋ฐ˜ํ™˜ํ•œ๋‹ค. ๋ฃจํŠธ ๋…ธ๋“œ๋Š” ์ œ์™ธํ•œ๋‹ค.
def slice(x, start, length): """ Collection function: returns an array containing all the elements in `x` from index `start` (or starting from the end if `start` is negative) with the specified `length`. >>> df = spark.createDataFrame([([1, 2, 3],), ([4, 5],)], ['x']) >>> df.select(slice(df.x, 2, 2...
Collection function: returns an array containing all the elements in `x` from index `start` (or starting from the end if `start` is negative) with the specified `length`. >>> df = spark.createDataFrame([([1, 2, 3],), ([4, 5],)], ['x']) >>> df.select(slice(df.x, 2, 2).alias("sliced")).collect() [Row(sli...
def extract_date(self, date): """Extract date from string if necessary. :returns: the extracted date. """ if isinstance(date, six.string_types): try: date = dateutil.parser.parse(date) except ValueError: raise ValueError( ...
Extract date from string if necessary. :returns: the extracted date.
def _install(self, name, autoinstall): '''Check existence of Python module and install it using command pip install if necessary.''' import importlib import pkg_resources spam_spec = importlib.util.find_spec(name) reinstall = False if spam_spec is not None: ...
Check existence of Python module and install it using command pip install if necessary.
def update_user(self, user_is_artist="", artist_level="", artist_specialty="", real_name="", tagline="", countryid="", website="", bio=""): """Update the users profile information :param user_is_artist: Is the user an artist? :param artist_level: If the user is an artist, what level are they ...
Update the users profile information :param user_is_artist: Is the user an artist? :param artist_level: If the user is an artist, what level are they :param artist_specialty: If the user is an artist, what is their specialty :param real_name: The users real name :param tagline: ...
def comment(self, s, **args):
    """Write *s* to the output wrapped in an XML comment (``<!-- ... -->``).

    Extra keyword arguments are forwarded to ``self.write`` for the
    comment text only; the delimiters are written verbatim.
    """
    opener, closer = u"<!-- ", u" -->"
    self.write(opener)
    self.write(s, **args)
    # writeln terminates the comment line, matching the original output.
    self.writeln(closer)
def feed_eof(self):
    """Send a potentially "ragged" EOF.

    Marks EOF on the incoming BIO and flushes it through
    ``feed_ssldata``.  If the EOF is unexpected, ``feed_ssldata`` is the
    call that raises the SSL_ERROR_EOF exception.
    """
    self._incoming.write_eof()
    ssldata, appdata = self.feed_ssldata(b'')
    # After EOF, at most an empty application-data chunk may remain.
    assert appdata in ([], [b''])
def _add_session(self, session, start_info, groups_by_name): """Adds a new Session protobuffer to the 'groups_by_name' dictionary. Called by _build_session_groups when we encounter a new session. Creates the Session protobuffer and adds it to the relevant group in the 'groups_by_name' dict. Creates the...
Adds a new Session protobuffer to the 'groups_by_name' dictionary. Called by _build_session_groups when we encounter a new session. Creates the Session protobuffer and adds it to the relevant group in the 'groups_by_name' dict. Creates the session group if this is the first time we encounter it. A...
def libvlc_media_event_manager(p_md): '''Get event manager from media descriptor object. NOTE: this function doesn't increment reference counting. @param p_md: a media descriptor object. @return: event manager object. ''' f = _Cfunctions.get('libvlc_media_event_manager', None) or \ _Cfun...
Get event manager from media descriptor object. NOTE: this function doesn't increment reference counting. @param p_md: a media descriptor object. @return: event manager object.
def pipeline_counter(self): """ Get pipeline counter of current job instance. Because instantiating job instance could be performed in different ways and those return different results, we have to check where from to get counter of the pipeline. :return: pipeline counter. ...
Get pipeline counter of current job instance. Because instantiating job instance could be performed in different ways and those return different results, we have to check where from to get counter of the pipeline. :return: pipeline counter.
def patched_model(): """Context Manager that safely patches django.db.Model.__reduce__().""" patched = ('__reduce__', '__getstate__', '__setstate__') originals = {} for patch in patched: try: originals[patch] = getattr(models.Model, patch) except: pass try: ...
Context Manager that safely patches django.db.Model.__reduce__().
def calculate_betweenness_centality(graph: BELGraph, number_samples: int = CENTRALITY_SAMPLES) -> Counter: """Calculate the betweenness centrality over nodes in the graph. Tries to do it with a certain number of samples, but then tries a complete approach if it fails. """ try: res = nx.betweenn...
Calculate the betweenness centrality over nodes in the graph. Tries to do it with a certain number of samples, but then tries a complete approach if it fails.
def write_preferences_file(self): """ Write json preferences file to (platform specific) user data directory, or PmagPy directory if appdirs module is missing. """ user_data_dir = find_pmag_dir.find_user_data_dir("thellier_gui") if not os.path.exists(user_data_dir): ...
Write json preferences file to (platform specific) user data directory, or PmagPy directory if appdirs module is missing.
def list_experiment(args): '''Get experiment information''' nni_config = Config(get_config_filename(args)) rest_port = nni_config.get_config('restServerPort') rest_pid = nni_config.get_config('restServerPid') if not detect_process(rest_pid): print_error('Experiment is not running...') ...
Get experiment information
def way(self, w): """Process each way.""" if w.id not in self.way_ids: return way_points = [] for n in w.nodes: try: way_points.append(Point(n.location.lon, n.location.lat)) except o.InvalidLocationError: logging.debug(...
Process each way.
def is_quoted(arg: str) -> bool:
    """
    Check whether a string is surrounded by a matching pair of quotes.

    :param arg: the string being checked for quotes
    :return: True if the string is quoted
    """
    # A quoted string needs at least an opening and a closing character.
    if len(arg) < 2:
        return False
    first, last = arg[0], arg[-1]
    return first == last and first in constants.QUOTES
def descendants(self): """Recursively return every dataset below current item.""" for i in self.current_item.items: self.move_to(i) if i.type == TYPE_COLLECTION: for c in self.children: yield c else: yield i ...
Recursively return every dataset below current item.
def chi_eff(mass1, mass2, spin1z, spin2z):
    """Return the effective spin: the mass-weighted average of the
    aligned spin components spin1z and spin2z."""
    total_mass = mass1 + mass2
    return (mass1 * spin1z + mass2 * spin2z) / total_mass
def _getScalesDiag(self,termx=0): """ Uses 2 term single trait model to get covar params for initialization Args: termx: non-noise term terms that is used for initialization """ assert self.P>1, 'CVarianceDecomposition:: diagonal init_method allowed onl...
Uses 2 term single trait model to get covar params for initialization Args: termx: non-noise term terms that is used for initialization
def energy_minimize(self, forcefield='UFF', steps=1000, **kwargs): """Perform an energy minimization on a Compound Default beahvior utilizes Open Babel (http://openbabel.org/docs/dev/) to perform an energy minimization/geometry optimization on a Compound by applying a generic force fiel...
Perform an energy minimization on a Compound Default beahvior utilizes Open Babel (http://openbabel.org/docs/dev/) to perform an energy minimization/geometry optimization on a Compound by applying a generic force field Can also utilize OpenMM (http://openmm.org/) to energy minimize ...
def is_connected(C, directed=True): """Check connectivity of the given matrix. Parameters ---------- C : scipy.sparse matrix Count matrix specifying edge weights. directed : bool, optional Whether to compute connected components for a directed or undirected graph. Default is T...
Check connectivity of the given matrix. Parameters ---------- C : scipy.sparse matrix Count matrix specifying edge weights. directed : bool, optional Whether to compute connected components for a directed or undirected graph. Default is True. Returns ------- is_connec...
def create(name, template_body=None, template_url=None, parameters=None, notification_arns=None, disable_rollback=None, timeout_in_minutes=None, capabilities=None, tags=None, on_failure=None, stack_policy_body=None, stack_policy_url=None, region=None, key=None, keyid=None, profile=None): ''' ...
Create a CFN stack. CLI Example: .. code-block:: bash salt myminion boto_cfn.create mystack template_url='https://s3.amazonaws.com/bucket/template.cft' \ region=us-east-1
def get_wu_settings(): ''' Get current Windows Update settings. Returns: dict: A dictionary of Windows Update settings: Featured Updates: Boolean value that indicates whether to display notifications for featured updates. Group Policy Required (Read-only): ...
Get current Windows Update settings. Returns: dict: A dictionary of Windows Update settings: Featured Updates: Boolean value that indicates whether to display notifications for featured updates. Group Policy Required (Read-only): Boolean value that indi...
def get_color(name, number=None): ''' Query the default terminal, for colors, etc. Direct queries supported on xterm, iTerm, perhaps others. Arguments: str: name, one of ('foreground', 'fg', 'background', 'bg', or 'index') # index grabs a palette ind...
Query the default terminal, for colors, etc. Direct queries supported on xterm, iTerm, perhaps others. Arguments: str: name, one of ('foreground', 'fg', 'background', 'bg', or 'index') # index grabs a palette index int: or a "dynamic color n...
def subparsers(self): """Obtain the subparser's object.""" try: return self.__subparsers except AttributeError: parent = super(ArgumentParser, self) self.__subparsers = parent.add_subparsers(title="drill down") self.__subparsers.metavar = "COMMAND"...
Obtain the subparser's object.
def p_expr(self, p):
    """expr : assignment_expr
            | expr COMMA assignment_expr
    """
    # NOTE: the docstring above is a PLY grammar rule, not documentation --
    # the parser generator reads it at runtime, so do not edit it.
    # len(p) == 2 means the single-production case: pass the
    # assignment_expr node through unchanged.
    if len(p) == 2:
        p[0] = p[1]
    else:
        # Left-recursive production: fold `expr , assignment_expr` into a
        # comma AST node (left-associative by construction).
        p[0] = ast.Comma(left=p[1], right=p[3])