code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def to_input_req(self): """Converts the ``self`` instance to the desired input request format. Returns: dict: Containing the "WarmStartType" and "ParentHyperParameterTuningJobs" as the first class fields. Examples: >>> warm_start_config = WarmStartConfig(warm_start_type...
Converts the ``self`` instance to the desired input request format. Returns: dict: Containing the "WarmStartType" and "ParentHyperParameterTuningJobs" as the first class fields. Examples: >>> warm_start_config = WarmStartConfig(warm_start_type=WarmStartTypes.TransferLearning,pa...
def remove_leading_zeros(num: str) -> str: """ Strips zeros while handling -, M, and empty strings """ if not num: return num if num.startswith('M'): ret = 'M' + num[1:].lstrip('0') elif num.startswith('-'): ret = '-' + num[1:].lstrip('0') else: ret = num.lstr...
Strips zeros while handling -, M, and empty strings
def dSbr_dV(self, Yf, Yt, V, buses=None, branches=None): """ Based on dSbr_dV.m from MATPOWER by Ray Zimmerman, developed at PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more information. @return: The branch power flow vectors and the partial derivatives of ...
Based on dSbr_dV.m from MATPOWER by Ray Zimmerman, developed at PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more information. @return: The branch power flow vectors and the partial derivatives of branch power flow w.r.t voltage magnitude and voltage angle. ...
def get(self): """ Constructs a ExecutionContextContext :returns: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextContext :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextContext """ return ExecutionContextContext( ...
Constructs a ExecutionContextContext :returns: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextContext :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextContext
def JoinPath(self, path_segments): """Joins the path segments into a path. Args: path_segments (list[str]): path segments. Returns: str: joined path segments prefixed with the path separator. """ # For paths on Windows we need to make sure to handle the first path # segment correct...
Joins the path segments into a path. Args: path_segments (list[str]): path segments. Returns: str: joined path segments prefixed with the path separator.
def within_line(self, viewer, points, p_start, p_stop, canvas_radius): """Points `points` and line endpoints `p_start`, `p_stop` are in data coordinates. Return True for points within the line defined by a line from p_start to p_end and within `canvas_radius`. The distance betwee...
Points `points` and line endpoints `p_start`, `p_stop` are in data coordinates. Return True for points within the line defined by a line from p_start to p_end and within `canvas_radius`. The distance between points is scaled by the viewer's canvas scale.
def variant_stats_from_variant(variant, metadata, merge_fn=(lambda all_stats: \ max(all_stats, key=(lambda stats: stats.tumor_stats.depth)))): """Parse the variant calling stats from a variant called from multiple variant ...
Parse the variant calling stats from a variant called from multiple variant files. The stats are merged based on `merge_fn` Parameters ---------- variant : varcode.Variant metadata : dict Dictionary of variant file to variant calling metadata from that file merge_fn : function F...
def parse_name(self): """This function uses string patterns to match a title cased name. This is done in a loop until there are no more names to match so as to be able to include surnames etc. in the output.""" name = [] while True: # Match the current char until it d...
This function uses string patterns to match a title cased name. This is done in a loop until there are no more names to match so as to be able to include surnames etc. in the output.
def write_uint16(self, value, little_endian=True): """ Pack the value as an unsigned integer and write 2 bytes to the stream. Args: value: little_endian (bool): specify the endianness. (Default) Little endian. Returns: int: the number of bytes writte...
Pack the value as an unsigned integer and write 2 bytes to the stream. Args: value: little_endian (bool): specify the endianness. (Default) Little endian. Returns: int: the number of bytes written.
def hard_wrap(self): """Grammar for hard wrap linebreak. You don't need to add two spaces at the end of a line. """ self.linebreak = re.compile(r'^ *\n(?!\s*$)') self.text = re.compile( r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)' )
Grammar for hard wrap linebreak. You don't need to add two spaces at the end of a line.
def create_tx(self, txins=None, txouts=None, lock_time=0): """Create unsigned rawtx with given txins/txouts as json data. <txins>: '[{"txid" : hexdata, "index" : integer}, ...]' <txouts>: '[{"address" : hexdata, "value" : satoshis}, ...]' """ txins = [] if txins is None else txin...
Create unsigned rawtx with given txins/txouts as json data. <txins>: '[{"txid" : hexdata, "index" : integer}, ...]' <txouts>: '[{"address" : hexdata, "value" : satoshis}, ...]'
def do_handle_log(self, workunit, level, *msg_elements): """Implementation of Reporter callback.""" if not self.is_under_main_root(workunit): return # If the element is a (msg, detail) pair, we ignore the detail. There's no # useful way to display it on the console. elements = [e if isinstanc...
Implementation of Reporter callback.
def _fetch(self, request): """ Fetch using the OkHttpClient """ client = self.client #: Dispatch the async call call = Call(__id__=client.newCall(request.request)) call.enqueue(request.handler) #: Save the call reference request.call = call
Fetch using the OkHttpClient
def _make_path(self, items): '''Returns a relative path for the given dictionary of items. Uses this url rule's url pattern and replaces instances of <var_name> with the appropriate value from the items dict. ''' for key, val in items.items(): if not isinstance(val, ...
Returns a relative path for the given dictionary of items. Uses this url rule's url pattern and replaces instances of <var_name> with the appropriate value from the items dict.
def evaluate_policy(self, sigma): """ Compute the value of a policy. Parameters ---------- sigma : array_like(int, ndim=1) Policy vector, of length n. Returns ------- v_sigma : ndarray(float, ndim=1) Value vector of `sigma`, of le...
Compute the value of a policy. Parameters ---------- sigma : array_like(int, ndim=1) Policy vector, of length n. Returns ------- v_sigma : ndarray(float, ndim=1) Value vector of `sigma`, of length n.
def _maybe_club(self, list_of_dicts): """ If all keys in a list of dicts are identical, values from each ``dict`` are clubbed, i.e. inserted under a common column heading. If the keys are not identical ``None`` is returned, and the list should be converted to HTML per the normal...
If all keys in a list of dicts are identical, values from each ``dict`` are clubbed, i.e. inserted under a common column heading. If the keys are not identical ``None`` is returned, and the list should be converted to HTML per the normal ``convert`` function. Parameters -------...
def __remove_index(self, ids): """remove affected ids from the index""" if not ids: return ids = ",".join((str(id) for id in ids)) self.execute("DELETE FROM fact_index where id in (%s)" % ids)
remove affected ids from the index
def _collective_with_groups(self, x, mesh_axes, collective): """Grouped collective, (across the given dimensions). Args: x: a LaidOutTensor mesh_axes: a list of integers - the mesh dimensions to be reduced collective: fn from list(tf.Tensor), list(device) -> list(tf.Tensor) Returns: ...
Grouped collective, (across the given dimensions). Args: x: a LaidOutTensor mesh_axes: a list of integers - the mesh dimensions to be reduced collective: fn from list(tf.Tensor), list(device) -> list(tf.Tensor) Returns: a LaidOutTensor
def book(symbol=None, token='', version=''): '''Book shows IEX’s bids and asks for given symbols. https://iexcloud.io/docs/api/#deep-book Args: symbol (string); Ticker to request token (string); Access token version (string); API version Returns: dict: result ''' ...
Book shows IEX’s bids and asks for given symbols. https://iexcloud.io/docs/api/#deep-book Args: symbol (string); Ticker to request token (string); Access token version (string); API version Returns: dict: result
def html_entity_decode_codepoint(self, m, defs=htmlentities.codepoint2name): """ decode html entity into one of the codepoint2name """ try: char = defs[m.group(1)] return "&{char};".format(char=char) except ValueErr...
decode html entity into one of the codepoint2name
def besj(self, x, n): ''' Function BESJ calculates Bessel function of first kind of order n Arguments: n - an integer (>=0), the order x - value at which the Bessel function is required -------------------- C++ Mathematical Library Converted from e...
Function BESJ calculates Bessel function of first kind of order n Arguments: n - an integer (>=0), the order x - value at which the Bessel function is required -------------------- C++ Mathematical Library Converted from equivalent FORTRAN library Converte...
def d3logpdf_dlink3(self, inv_link_f, y, Y_metadata=None): """ Third order derivative log-likelihood function at y given link(f) w.r.t link(f) .. math:: \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{-2(v+1)((y_{i} - \lambda(f_{i}))^3 - 3(y_{i} - \lambda(f_{i}...
Third order derivative log-likelihood function at y given link(f) w.r.t link(f) .. math:: \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = \\frac{-2(v+1)((y_{i} - \lambda(f_{i}))^3 - 3(y_{i} - \lambda(f_{i})) \\sigma^{2} v))}{((y_{i} - \lambda(f_{i})) + \\sigma^{2} v)^3} :par...
def parse_environment_data(block): """ Parse the environment block into a Python dictionary. @warn: Deprecated since WinAppDbg 1.5. @note: Values of duplicated keys are joined using null characters. @type block: list of str @param block: List of strings as returned by...
Parse the environment block into a Python dictionary. @warn: Deprecated since WinAppDbg 1.5. @note: Values of duplicated keys are joined using null characters. @type block: list of str @param block: List of strings as returned by L{get_environment_data}. @rtype: dict(str S{...
def update_batch(self, **kwargs): """ Simplistic batch update operation implemented in terms of `replace()`. Assumes that: - Request and response schemas contains lists of items. - Request items define a primary key identifier - The entire batch succeeds or fails tog...
Simplistic batch update operation implemented in terms of `replace()`. Assumes that: - Request and response schemas contains lists of items. - Request items define a primary key identifier - The entire batch succeeds or fails together.
def getAllClasses(self, hide_base_schemas=True, hide_implicit_types=True): """ * hide_base_schemas: by default, obscure all RDF/RDFS/OWL/XML stuff * hide_implicit_types: don't make any inference based on rdf:type declarations """ query = """SELECT DISTINCT ?x ?c ...
* hide_base_schemas: by default, obscure all RDF/RDFS/OWL/XML stuff * hide_implicit_types: don't make any inference based on rdf:type declarations
def resetPassword(self, attempt, newPassword): """ @param attempt: L{_PasswordResetAttempt} reset the password of the user who initiated C{attempt} to C{newPassword}, and afterward, delete the attempt and any persistent sessions that belong to the user """ self....
@param attempt: L{_PasswordResetAttempt} reset the password of the user who initiated C{attempt} to C{newPassword}, and afterward, delete the attempt and any persistent sessions that belong to the user
def fit_naa(self, reject_outliers=3.0, fit_lb=1.8, fit_ub=2.4, phase_correct=True): """ Fit a Lorentzian function to the NAA peak at ~ 2 ppm. Example of fitting inverted peak: Foerster et al. 2013, An imbalance between excitatory and inhibitory neurotransmitters in amyo...
Fit a Lorentzian function to the NAA peak at ~ 2 ppm. Example of fitting inverted peak: Foerster et al. 2013, An imbalance between excitatory and inhibitory neurotransmitters in amyothrophic lateral sclerosis revealed by use of 3T proton MRS
def midi_outputs(self): """ :return: A list of MIDI output :class:`Ports`. """ return self.client.get_ports(is_midi=True, is_physical=True, is_output=True)
:return: A list of MIDI output :class:`Ports`.
def get_diff_amounts(self): """Gets list of total diff :return: List of total diff between 2 consecutive commits since start """ diffs = [] last_commit = None for commit in self.repo.iter_commits(): if last_commit is not None: diff = self.get...
Gets list of total diff :return: List of total diff between 2 consecutive commits since start
def delete(self, force=False): """ Deletes the current framework :param force: If True, stops the framework before deleting it :return: True if the framework has been delete, False if is couldn't """ if not force and self._state not in ( Bundle.INSTALLED, ...
Deletes the current framework :param force: If True, stops the framework before deleting it :return: True if the framework has been delete, False if is couldn't
def get_instance(self, payload): """ Build an instance of EngagementContextInstance :param dict payload: Payload response from the API :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextInstance :rtype: twilio.rest.studio.v1.flow.engagement.enga...
Build an instance of EngagementContextInstance :param dict payload: Payload response from the API :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextInstance :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextInstance
def parse_mapping(mapping_file: Optional[str]) -> configparser.ConfigParser: """ Parse the file containing the mappings from hosts to pass entries. Args: mapping_file: Name of the file to parse. If ``None``, the default file from the XDG location is used. """ LOGGER....
Parse the file containing the mappings from hosts to pass entries. Args: mapping_file: Name of the file to parse. If ``None``, the default file from the XDG location is used.
def import_from_netcdf(network, path, skip_time=False): """ Import network data from netCDF file or xarray Dataset at `path`. Parameters ---------- path : string|xr.Dataset Path to netCDF dataset or instance of xarray Dataset skip_time : bool, default False Skip reading in time ...
Import network data from netCDF file or xarray Dataset at `path`. Parameters ---------- path : string|xr.Dataset Path to netCDF dataset or instance of xarray Dataset skip_time : bool, default False Skip reading in time dependent attributes
def get_node_affiliations(self, jid, node): """ Return the affiliations of other jids at a node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the node to query :type node: :class:`str` :raises aioxmpp.errors.XMPP...
Return the affiliations of other jids at a node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the node to query :type node: :class:`str` :raises aioxmpp.errors.XMPPError: as returned by the service :return: The response ...
async def _get_descriptions(self): """Read a column descriptor packet for each column in the result.""" self.fields = [] self.converters = [] use_unicode = self.connection.use_unicode conn_encoding = self.connection.encoding description = [] for i in range(self.fi...
Read a column descriptor packet for each column in the result.
def resolve_for(self, node, exact=None): """ Resolves this query relative to the given node. Args: node (node.Base): The node relative to which this query should be resolved. exact (bool, optional): Whether to exactly match text. Returns: list[Elemen...
Resolves this query relative to the given node. Args: node (node.Base): The node relative to which this query should be resolved. exact (bool, optional): Whether to exactly match text. Returns: list[Element]: A list of elements matched by this query.
def get_connection(self, name): """ An individual connection. :param name: The connection name :type name: str """ return self._api_get('/api/connections/{0}'.format( urllib.parse.quote_plus(name) ))
An individual connection. :param name: The connection name :type name: str
def _maybe_start_instance(instance): """Starts instance if it's stopped, no-op otherwise.""" if not instance: return if instance.state['Name'] == 'stopped': instance.start() while True: print(f"Waiting for {instance} to start.") instance.reload() if instance.state['Name'] == 'runn...
Starts instance if it's stopped, no-op otherwise.
def to_native(key): """Find the native name for the language specified by key. >>> to_native('br') u'brezhoneg' >>> to_native('sw') u'Kiswahili' """ item = find(whatever=key) if not item: raise NonExistentLanguageError('Language does not exist.') return item[u'native']
Find the native name for the language specified by key. >>> to_native('br') u'brezhoneg' >>> to_native('sw') u'Kiswahili'
def save_raw_data_from_data_queue(data_queue, filename, mode='a', title='', scan_parameters=None): # mode="r+" to append data, raw_data_file_h5 must exist, "w" to overwrite raw_data_file_h5, "a" to append data, if raw_data_file_h5 does not exist it is created '''Writing raw data file from data queue If you ne...
Writing raw data file from data queue If you need to write raw data once in a while this function may make it easy for you.
def filter_macro(func, *args, **kwargs): """ Promotes a function that returns a filter into its own filter type. Example:: @filter_macro def String(): return Unicode | Strip | NotEmpty # You can now use `String` anywhere you would use a regular Filter: (String ...
Promotes a function that returns a filter into its own filter type. Example:: @filter_macro def String(): return Unicode | Strip | NotEmpty # You can now use `String` anywhere you would use a regular Filter: (String | Split(':')).apply('...') You can also use ``fi...
def activate(self, resource=None, timeout=3, wait_for_finish=False): """ Activate this package on the SMC :param list resource: node href's to activate on. Resource is only required for software upgrades :param int timeout: timeout between queries :raises TaskRunF...
Activate this package on the SMC :param list resource: node href's to activate on. Resource is only required for software upgrades :param int timeout: timeout between queries :raises TaskRunFailed: failure during activation (downloading, etc) :rtype: TaskOperationPoller
def path_to_songname(path: str)->str: """ Extracts song name from a filepath. Used to identify which songs have already been fingerprinted on disk. """ return os.path.splitext(os.path.basename(path))[0]
Extracts song name from a filepath. Used to identify which songs have already been fingerprinted on disk.
def bschoc(value, ndim, lenvals, array, order): """ Do a binary search for a given value within a character string array, accompanied by an order vector. Return the index of the matching array entry, or -1 if the key value is not found. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bscho...
Do a binary search for a given value within a character string array, accompanied by an order vector. Return the index of the matching array entry, or -1 if the key value is not found. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bschoc_c.html :param value: Key value to be found in array. ...
def update_points(self): """ 统一变为多个点组成的多边形,用于处理碰撞 """ x, y, w, h = self.x, self.y, self.w, self.h self.points = (x, y, x + w, y, x + w, y + h, x, y + h)
统一变为多个点组成的多边形,用于处理碰撞
def ystep(self): r"""Minimise Augmented Lagrangian with respect to :math:`\mathbf{y}`. """ self.Y = np.asarray(sp.prox_l2( self.AX + self.U, (self.lmbda/self.rho)*self.Wtvna, axis=self.saxes), dtype=self.dtype)
r"""Minimise Augmented Lagrangian with respect to :math:`\mathbf{y}`.
def export_txt(obj, file_name, two_dimensional=False, **kwargs): """ Exports control points as a text file. For curves the output is always a list of control points. For surfaces, it is possible to generate a 2-dimensional control point output file using ``two_dimensional``. Please see :py:func:`.exch...
Exports control points as a text file. For curves the output is always a list of control points. For surfaces, it is possible to generate a 2-dimensional control point output file using ``two_dimensional``. Please see :py:func:`.exchange.import_txt()` for detailed description of the keyword arguments. ...
def load_registered_fixtures(context): """ Apply fixtures that are registered with the @fixtures decorator. """ # -- SELECT STEP REGISTRY: # HINT: Newer behave versions use runner.step_registry # to be able to support multiple runners, each with its own step_registry. runner = context._runne...
Apply fixtures that are registered with the @fixtures decorator.
def remove_users_from_account_group(self, account_id, group_id, **kwargs): # noqa: E501 """Remove users from a group. # noqa: E501 An endpoint for removing users from groups. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/policy-groups/{groupID}/users...
Remove users from a group. # noqa: E501 An endpoint for removing users from groups. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/policy-groups/{groupID}/users -d '[0162056a9a1586f30242590700000000,0117056a9a1586f30242590700000000]' -H 'content-type: applicat...
def sub_tag(self, path, follow=True): """Returns direct sub-record with given tag name or None. Path can be a simple tag name, in which case the first direct sub-record of this record with the matching tag is returned. Path can also consist of several tags separated by slashes, in that ...
Returns direct sub-record with given tag name or None. Path can be a simple tag name, in which case the first direct sub-record of this record with the matching tag is returned. Path can also consist of several tags separated by slashes, in that case sub-records are searched recursively...
def canonical_peer( self, peer ): """ Get the canonical peer name """ their_host, their_port = url_to_host_port( peer ) if their_host in ['127.0.0.1', '::1']: their_host = 'localhost' return "%s:%s" % (their_host, their_port)
Get the canonical peer name
def delete_snmp_template(auth, url, template_name= None, template_id= None): """ Takes template_name as input to issue RESTUL call to HP IMC which will delete the specific snmp template from the IMC system :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param u...
Takes template_name as input to issue RESTUL call to HP IMC which will delete the specific snmp template from the IMC system :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :par...
def postprocess(self, calc, with_module=None, dry_run=None): ''' Invokes module(s) API NB: this is the PUBLIC method @returns apps_dict ''' for appname, appclass in self.Apps.items(): if with_module and with_module != appname: continue run_permitt...
Invokes module(s) API NB: this is the PUBLIC method @returns apps_dict
def create_sconstruct(self, project_dir='', sayyes=False): """Creates a default SConstruct file""" project_dir = util.check_dir(project_dir) sconstruct_name = 'SConstruct' sconstruct_path = util.safe_join(project_dir, sconstruct_name) local_sconstruct_path = util.safe_join( ...
Creates a default SConstruct file
def expect(self, pattern, timeout=-1): """Waits on the given pattern to appear in std_out""" if self.blocking: raise RuntimeError("expect can only be used on non-blocking commands.") try: self.subprocess.expect(pattern=pattern, timeout=timeout) except pexpect.EO...
Waits on the given pattern to appear in std_out
def diff(cls, a, b, ignore_formatting=False): """Returns two FSArrays with differences underlined""" def underline(x): return u'\x1b[4m%s\x1b[0m' % (x,) def blink(x): return u'\x1b[5m%s\x1b[0m' % (x,) a_rows = [] b_rows = [] max_width = max([len(row) for row in a] + [len(...
Returns two FSArrays with differences underlined
def _set_cos_traffic_class(self, v, load=False): """ Setter method for cos_traffic_class, mapped from YANG variable /qos/map/cos_traffic_class (list) If this variable is read-only (config: false) in the source YANG file, then _set_cos_traffic_class is considered as a private method. Backends looking...
Setter method for cos_traffic_class, mapped from YANG variable /qos/map/cos_traffic_class (list) If this variable is read-only (config: false) in the source YANG file, then _set_cos_traffic_class is considered as a private method. Backends looking to populate this variable should do so via calling thisO...
def transform(self, X): """ Add the features calculated using the timeseries_container and add them to the corresponding rows in the input pandas.DataFrame X. To save some computing time, you should only include those time serieses in the container, that you need. You can set th...
Add the features calculated using the timeseries_container and add them to the corresponding rows in the input pandas.DataFrame X. To save some computing time, you should only include those time serieses in the container, that you need. You can set the timeseries container with the method :func...
def _sd_handler(self, desc_type, unit, desc, show_on_keypad): """Text description""" if desc_type not in self._descriptions_in_progress: LOG.debug("Text description response ignored for " + str(desc_type)) return (max_units, results, callback) = self._descriptions_in_pro...
Text description
def request_openbus(self, service, endpoint, **kwargs): """Make a request to the given endpoint of the ``openbus`` server. This returns the plain JSON (dict) response which can then be parsed using one of the implemented types. Args: service (str): Service to fetch ('bus' o...
Make a request to the given endpoint of the ``openbus`` server. This returns the plain JSON (dict) response which can then be parsed using one of the implemented types. Args: service (str): Service to fetch ('bus' or 'geo'). endpoint (str): Endpoint to send the request ...
def enbase64(byte_str): """ Encode bytes/strings to base64. Args: - ``byte_str``: The string or bytes to base64 encode. Returns: - byte_str encoded as base64. """ # Python 3: base64.b64encode() expects type byte if isinstance(byte_str, str) and not PYTHON2: byte_s...
Encode bytes/strings to base64. Args: - ``byte_str``: The string or bytes to base64 encode. Returns: - byte_str encoded as base64.
def load_profiles_definitions(filename): """ Load the registered profiles defined in the file filename. This is a yml file that defines the basic characteristics of each profile with the following variables: It produces a dictionary that can be accessed with the a string that defines the profi...
Load the registered profiles defined in the file filename. This is a yml file that defines the basic characteristics of each profile with the following variables: It produces a dictionary that can be accessed with the a string that defines the profile organization and name in the form <org>:<profile n...
def run(argv=argv): """Runs the search_google command line tool. This function runs the search_google command line tool in a terminal. It was intended for use inside a py file (.py) to be executed using python. Notes: * ``[q]`` reflects key ``q`` in the ``cseargs`` parameter for :class:`api.result...
Runs the search_google command line tool. This function runs the search_google command line tool in a terminal. It was intended for use inside a py file (.py) to be executed using python. Notes: * ``[q]`` reflects key ``q`` in the ``cseargs`` parameter for :class:`api.results` * Optional argumen...
def wait_for_import_to_complete(self, import_id, region='us-east-1'): """ Monitors the status of aws import, waiting for it to complete, or error out :param import_id: id of import task to monitor """ task_running = True while task_running: import_status_cmd =...
Monitors the status of aws import, waiting for it to complete, or error out :param import_id: id of import task to monitor
def extendManager(mixinClass): ''' Use as a class decorator to add extra methods to your model manager. Example usage: class Article(django.db.models.Model): published = models.DateTimeField() ... @extendManager class objects(object): def getPublished(self): return self.filter(published__lte...
Use as a class decorator to add extra methods to your model manager. Example usage: class Article(django.db.models.Model): published = models.DateTimeField() ... @extendManager class objects(object): def getPublished(self): return self.filter(published__lte = django.utils.timezone.now()).order...
def pltnp(point, v1, v2, v3): """ Find the nearest point on a triangular plate to a given point. https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnp_c.html :param point: A point in 3-dimensional space. :type point: 3-Element Array of floats :param v1: Vertices of a triangu...
Find the nearest point on a triangular plate to a given point. https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnp_c.html :param point: A point in 3-dimensional space. :type point: 3-Element Array of floats :param v1: Vertices of a triangular plate. :type v1: 3-Element Array o...
def require_exp_directory(f): """Decorator to verify that a command is run inside a valid Dallinger experiment directory. """ error = "The current directory is not a valid Dallinger experiment." @wraps(f) def wrapper(**kwargs): if not verify_directory(kwargs.get("verbose")): ...
Decorator to verify that a command is run inside a valid Dallinger experiment directory.
def get_python_symbol_icons(oedata): """Return a list of icons for oedata of a python file.""" class_icon = ima.icon('class') method_icon = ima.icon('method') function_icon = ima.icon('function') private_icon = ima.icon('private1') super_private_icon = ima.icon('private2') symbols = process...
Return a list of icons for oedata of a python file.
def delete(self, filename, storage_type=None, bucket_name=None): """Deletes the specified file, either locally or from S3, depending on the file's storage type.""" if not (storage_type and bucket_name): self._delete_local(filename) else: if storage_type != 's3': ...
Deletes the specified file, either locally or from S3, depending on the file's storage type.
def mapper_from_partial_prior_arguments(self, arguments): """ Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors, keeping existing priors where no mapping is provided. Parameters ---------- arguments: {Prior: Prior} A di...
Creates a new model mapper from a dictionary mapping_matrix existing priors to new priors, keeping existing priors where no mapping is provided. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping_matrix priors to priors Returns ------- ...
def analyze(self, scratch, **kwargs): """Run and return the results from the DuplicateScripts plugin. Only takes into account scripts with more than 3 blocks. """ scripts_set = set() for script in self.iter_scripts(scratch): if script[0].type.text == 'define %s': ...
Run and return the results from the DuplicateScripts plugin. Only takes into account scripts with more than 3 blocks.
def from_connections(cls, caption, connections): """Create a new Data Source give a list of Connections.""" root = ET.Element('datasource', caption=caption, version='10.0', inline='true') outer_connection = ET.SubElement(root, 'connection') outer_connection.set('class', 'federated') ...
Create a new Data Source give a list of Connections.
def start(self): """ Starts sending periodic HeartBeat operations. """ def _heartbeat(): if not self._client.lifecycle.is_live: return self._heartbeat() self._heartbeat_timer = self._client.reactor.add_timer(self._heartbeat_interval, _h...
Starts sending periodic HeartBeat operations.
def init_app(self, app): """Initialize extension to the given application. Extension will be registered to `app.extensions` with lower classname as key and instance as value. :param app: Flask application. """ self.init_extension(app) if not hasattr(app, 'exten...
Initialize extension to the given application. Extension will be registered to `app.extensions` with lower classname as key and instance as value. :param app: Flask application.
def log_start(task, logger="TaskLogger"): """Begin logging of a task Convenience function to log a task in the default TaskLogger Parameters ---------- task : str Name of the task to be started logger : str, optional (default: "TaskLogger") Unique name of the logger to retr...
Begin logging of a task Convenience function to log a task in the default TaskLogger Parameters ---------- task : str Name of the task to be started logger : str, optional (default: "TaskLogger") Unique name of the logger to retrieve Returns ------- logger : TaskLo...
def iterate(iterator, n=None): """Efficiently advances the iterator N times; by default goes to its end. The actual loop is done "in C" and hence it is faster than equivalent 'for'. :param n: How much the iterator should be advanced. If None, it will be advanced until the end. """ en...
Efficiently advances the iterator N times; by default goes to its end. The actual loop is done "in C" and hence it is faster than equivalent 'for'. :param n: How much the iterator should be advanced. If None, it will be advanced until the end.
def remote_archive(class_obj: type) -> type: """ Decorator to annotate the RemoteArchive class. Registers the decorated class as the RemoteArchive known type. """ assert isinstance(class_obj, type), "class_obj is not a Class" global _remote_archive_resource_type _remote_archive_resource_type...
Decorator to annotate the RemoteArchive class. Registers the decorated class as the RemoteArchive known type.
def build(outdir): """Blends the generated files and outputs a HTML website""" print("Building your Blended files into a website!") reload(sys) sys.setdefaultencoding('utf8') build_files(outdir) print("The files are built! You can find them in the " + outdir + "/ directory. Run the...
Blends the generated files and outputs a HTML website
def gamma(ranks_list1,ranks_list2): ''' Goodman and Kruskal's gamma correlation coefficient :param ranks_list1: a list of ranks (integers) :param ranks_list2: a second list of ranks (integers) of equal length with corresponding entries :return: Gamma correlation coefficient (rank correlation ignorin...
Goodman and Kruskal's gamma correlation coefficient :param ranks_list1: a list of ranks (integers) :param ranks_list2: a second list of ranks (integers) of equal length with corresponding entries :return: Gamma correlation coefficient (rank correlation ignoring ties)
def slice_rates_to_data(self, strain): ''' For the strain data, checks to see if seismicity rates have been calculated. If so, each column in the array is sliced and stored as a single vector in the strain.data dictionary with the corresponding magnitude as a key. :param...
For the strain data, checks to see if seismicity rates have been calculated. If so, each column in the array is sliced and stored as a single vector in the strain.data dictionary with the corresponding magnitude as a key. :param strain: Instance of :class: openquake.hmtk.str...
def check_event_coverage(patterns, event_list): """Calculate the ratio of patterns that were extracted.""" proportions = [] for pattern_list in patterns: proportion = 0 for pattern in pattern_list: for node in pattern.nodes(): if node in event_list: ...
Calculate the ratio of patterns that were extracted.
def solver(A, config): """Generate an SA solver given matrix A and a configuration. Parameters ---------- A : array, matrix, csr_matrix, bsr_matrix Matrix to invert, CSR or BSR format preferred for efficiency config : dict A dictionary of solver configuration parameters that is used...
Generate an SA solver given matrix A and a configuration. Parameters ---------- A : array, matrix, csr_matrix, bsr_matrix Matrix to invert, CSR or BSR format preferred for efficiency config : dict A dictionary of solver configuration parameters that is used to generate a smoothe...
def generate_not(self): """ Means that value have not to be valid by this definition. .. code-block:: python {'not': {'type': 'null'}} Valid values for this definition are 'hello', 42, {} ... but not None. Since draft 06 definition can be boolean. False means noth...
Means that value have not to be valid by this definition. .. code-block:: python {'not': {'type': 'null'}} Valid values for this definition are 'hello', 42, {} ... but not None. Since draft 06 definition can be boolean. False means nothing, True means everything is invali...
def transform(self, X, mean=None, lenscale=None): """ Apply the spectral mixture component basis to X. Parameters ---------- X: ndarray (N, d) array of observations where N is the number of samples, and d is the dimensionality of X. mean: ndarray,...
Apply the spectral mixture component basis to X. Parameters ---------- X: ndarray (N, d) array of observations where N is the number of samples, and d is the dimensionality of X. mean: ndarray, optional array of shape (d,) frequency means (one for eac...
def add_directory(self, iso_path=None, rr_name=None, joliet_path=None, file_mode=None, udf_path=None): # type: (Optional[str], Optional[str], Optional[str], int, Optional[str]) -> None ''' Add a directory to the ISO. At least one of an iso_path, joliet_path, or udf...
Add a directory to the ISO. At least one of an iso_path, joliet_path, or udf_path must be provided. Providing joliet_path on a non-Joliet ISO, or udf_path on a non-UDF ISO, is an error. If the ISO contains Rock Ridge, then a Rock Ridge name must be provided. Parameters: iso_...
def get_last_week_range(weekday_start="Sunday"): """ Gets the date for the first and the last day of the previous complete week. :param weekday_start: Either "Monday" or "Sunday", indicating the first day of the week. :returns: A tuple containing two date objects, for the first and the last day of the week...
Gets the date for the first and the last day of the previous complete week. :param weekday_start: Either "Monday" or "Sunday", indicating the first day of the week. :returns: A tuple containing two date objects, for the first and the last day of the week respectively.
def add(self, factory, component, properties=None): # type: (str, str, dict) -> None """ Enqueues the instantiation of the given component :param factory: Factory name :param component: Component name :param properties: Component properties :raise ValueError: Com...
Enqueues the instantiation of the given component :param factory: Factory name :param component: Component name :param properties: Component properties :raise ValueError: Component name already reserved in the queue :raise Exception: Error instantiating the component
def addVariantSet(self): """ Adds a new VariantSet into this repo. """ self._openRepo() dataset = self._repo.getDatasetByName(self._args.datasetName) dataUrls = self._args.dataFiles name = self._args.name if len(dataUrls) == 1: if self._args.na...
Adds a new VariantSet into this repo.
def to_json(self):
    """Serialize this object to a JSON string, including its relay channels.

    :return: str
    """
    payload = self.to_json_basic()
    payload['channels'] = self.relay_channels
    return json.dumps(payload)
:return: str
def winner(self):
    """Return 'x' or 'o' if that player owns a complete line, otherwise None."""
    lines = ((0, 3, 6), (1, 4, 7), (2, 5, 8),   # columns
             (0, 1, 2), (3, 4, 5), (6, 7, 8),   # rows
             (0, 4, 8), (2, 4, 6))              # diagonals
    # Check 'x' first, then 'o', mirroring the original search order.
    for mark in ('x', 'o'):
        if any(all(self.spots[i] == mark for i in line) for line in lines):
            return mark
    return None
Returns either x or o if one of them won, otherwise None
def zoom_out(self):
    """Shrink the grid zoom level by the configured zoom factor.

    The reduced level is applied only while it remains above the
    configured minimum zoom; otherwise the call does nothing.
    """
    current = self.grid.grid_renderer.zoom
    reduced = current * (1 - config["zoom_factor"])
    if reduced > config["minimum_zoom"]:
        self.zoom(reduced)
Zooms out by zoom factor
def airspeeds_encode(self, time_boot_ms, airspeed_imu, airspeed_pitot, airspeed_hot_wire, airspeed_ultrasonic, aoa, aoy): ''' The airspeed measured by sensors and IMU time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) airspe...
The airspeed measured by sensors and IMU time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) airspeed_imu : Airspeed estimate from IMU, cm/s (int16_t) airspeed_pitot : Pitot measured forward airpseed, cm/s (int16_t) ...
async def save(self): """Save this subnet.""" if 'vlan' in self._changed_data and self._changed_data['vlan']: # Update uses the ID of the VLAN, not the VLAN object. self._changed_data['vlan'] = self._changed_data['vlan']['id'] if (self._orig_data['vlan'] and ...
Save this subnet.
def _clean_streams(repo, mapped_streams): """Clean mapped standard streams.""" for stream_name in ('stdout', 'stderr'): stream = mapped_streams.get(stream_name) if not stream: continue path = os.path.relpath(stream, start=repo.working_dir) if (path, 0) not in repo.in...
Clean mapped standard streams.
def DSP_callback_toc(self):
    """Append the elapsed time since start_time to the DSP_toc list.

    Does nothing when Tcapture is 0 (timing capture disabled).
    """
    if self.Tcapture <= 0:
        return
    self.DSP_toc.append(time.time() - self.start_time)
Add new toc time to the DSP_toc list. Will not be called if Tcapture = 0.
def headers_for_url(cls, url):
    """Issue a HEAD request for *url* and return its headers as a dict.

    Any non-200 status is routed through the class-level HTTP error
    handler before the headers are read.
    """
    head_response = cls.http_request(url, method='HEAD')
    if head_response.status != 200:
        cls.raise_http_error(head_response)
    return Resource.headers_as_dict(head_response)
Return the headers only for the given URL as a dict
def entropy(self, base = 2): """Compute the entropy of the distribution""" entropy = 0 if not base and self.base: base = self.base for type in self._dist: if not base: entropy += self._dist[type] * -math.log(self._dist[type]) else: ...
Compute the entropy of the distribution
def _get_price_id_for_upgrade(self, package_items, option, value, public=True): """Find the price id for the option and value to upgrade. Deprecated in favor of _get_price_id_for_upgrade_option() :param list package_items: Contains all the items related to an VS :param string option: D...
Find the price id for the option and value to upgrade. Deprecated in favor of _get_price_id_for_upgrade_option() :param list package_items: Contains all the items related to a VS :param string option: Describes type of parameter to be upgraded :param int value: The value of the parame...
def walk_files(args, root, directory, action): """ Recusively go do the subdirectories of the directory, calling the action on each file """ for entry in os.listdir(directory): if is_hidden(args, entry): continue if is_excluded_directory(args, entry): continu...
Recursively go through the subdirectories of the directory, calling the action on each file
def list(context, job_id, sort, limit, where, verbose): """list(context, sort, limit, where, verbose) List all files. >>> dcictl file-list job-id [OPTIONS] :param string sort: Field to apply sort :param integer limit: Max number of rows to return :param string where: An optional filter criter...
list(context, sort, limit, where, verbose) List all files. >>> dcictl file-list job-id [OPTIONS] :param string sort: Field to apply sort :param integer limit: Max number of rows to return :param string where: An optional filter criteria :param boolean verbose: Display verbose output