code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def get_config_section(self, name):
    """Return the contents of a named configuration section.

    :param name: section name to look up
    :return: list of (option, value) pairs from the section, or an
        empty list when the section does not exist
    """
    if not self.config.has_section(name):
        return []
    return self.config.items(name)
Get a section of a configuration
def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER): """Sleep and produce a new sleep time. .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\ 2015/03/backoff.html Select a duration between zero and ``current_sleep``. It mi...
Sleep and produce a new sleep time. .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\ 2015/03/backoff.html Select a duration between zero and ``current_sleep``. It might seem counterintuitive to have so much jitter, but `Exponential Backo...
def _max_width_formatter(string, cols, separator='\n'): """Returns a freshly formatted :param string: string to be formatted :type string: basestring or clint.textui.colored.ColoredString :param cols: max width the text to be formatted :type cols: int :param separator: separator to break rows ...
Returns a freshly formatted :param string: string to be formatted :type string: basestring or clint.textui.colored.ColoredString :param cols: max width the text to be formatted :type cols: int :param separator: separator to break rows :type separator: basestring
def xoffset(self, value):
    """Set the x-offset, accepting only numeric values.

    :param value: new x-offset; ignored unless it is an int or float
        and differs from the current ``self._xoffset``.
    """
    # BUGFIX: the original tested isinstance(value, (int, float, long)) —
    # `long` is a Python 2 builtin and raises NameError on Python 3.
    # Python 3 ints are arbitrary precision, so (int, float) covers it.
    if self._xoffset != value and isinstance(value, (int, float)):
        self._xoffset = value
gets/sets the xoffset
def document_delete(index, doc_type, id, hosts=None, profile=None): ''' Delete a document from an index index Index name where the document resides doc_type Type of the document id Document identifier CLI example:: salt myminion elasticsearch.document_delete te...
Delete a document from an index index Index name where the document resides doc_type Type of the document id Document identifier CLI example:: salt myminion elasticsearch.document_delete testindex doctype1 AUx-384m0Bug_8U80wQZ
def unsafe_peek(init): """ Deserialize all the attributes available in the container and pass them in the same order as they come in the container. This is a factory function; returns the actual `peek` routine. Arguments: init: type constructor. Returns: callable: deserializ...
Deserialize all the attributes available in the container and pass them in the same order as they come in the container. This is a factory function; returns the actual `peek` routine. Arguments: init: type constructor. Returns: callable: deserializer (`peek` routine).
def set_distribute_verbatim(self, distribute_verbatim=None): """Sets the distribution rights. :param distribute_verbatim: right to distribute verbatim copies :type distribute_verbatim: ``boolean`` :raise: ``InvalidArgument`` -- ``distribute_verbatim`` is invalid :raise: ``NoAcce...
Sets the distribution rights. :param distribute_verbatim: right to distribute verbatim copies :type distribute_verbatim: ``boolean`` :raise: ``InvalidArgument`` -- ``distribute_verbatim`` is invalid :raise: ``NoAccess`` -- authorization failure *compliance: mandatory -- This me...
def decode (cls, bytes, cmddict=None): """Decodes sequence command attributes from an array of bytes and returns a new SeqCmdAttrs. """ byte = struct.unpack('B', bytes)[0] self = cls() defval = self.default for bit, name, value0, value1, default in SeqCmdAttrs.Table: mask = 1 <<...
Decodes sequence command attributes from an array of bytes and returns a new SeqCmdAttrs.
def make_shell_logfiles_url(host, shell_port, _, instance_id=None): """ Make the url for log-files in heron-shell from the info stored in stmgr. If no instance_id is provided, the link will be to the dir for the whole container. If shell port is not present, it returns None. """ if not shell_port: r...
Make the url for log-files in heron-shell from the info stored in stmgr. If no instance_id is provided, the link will be to the dir for the whole container. If shell port is not present, it returns None.
def get_space_information(self, space_key, expand=None, callback=None): """ Returns information about a space. :param space_key (string): A string containing the key of the space. :param expand (string): OPTIONAL: A comma separated list of properties to expand on the space. Default: Empt...
Returns information about a space. :param space_key (string): A string containing the key of the space. :param expand (string): OPTIONAL: A comma separated list of properties to expand on the space. Default: Empty. :param callback: OPTIONAL: The callback to execute on the resulting data, before ...
def refresh(self): """ Security endpoint for the refresh token, so we can obtain a new token without forcing the user to login again --- post: responses: 200: description: Refresh Successful content: applic...
Security endpoint for the refresh token, so we can obtain a new token without forcing the user to login again --- post: responses: 200: description: Refresh Successful content: application/json: schema: ...
def parseprofile(profilelog, out): ''' Parse a profile log and print the result on screen ''' file = open(out, 'w') # opening the output file print('Opening the profile in %s...' % profilelog) p = pstats.Stats(profilelog, stream=file) # parsing the profile with pstats, and output everything to t...
Parse a profile log and print the result on screen
def write_summary_cnts_all(self):
    """Write the level/depth count summary covering every active GO term."""
    all_records = set(self.obo.values())
    self._write_summary_cnts(self.get_cnts_levels_depths_recs(all_records))
Write summary of level and depth counts for all active GO Terms.
def get_snippet(self, snippet_key = None): '''Get all/one specific snippet by its key Args: key snippet key (default: None i.e. ALL) return (status code, snippet dict or list thereof) ''' uri = '/'.join([ self.api_uri, self.snippets_suffix ]) if snippet_key: uri = '/'.join([ ...
Get all/one specific snippet by its key Args: key snippet key (default: None i.e. ALL) return (status code, snippet dict or list thereof)
def generic_find_constraint_name(table, columns, referenced, db): """Utility to find a constraint name in alembic migrations""" t = sa.Table(table, db.metadata, autoload=True, autoload_with=db.engine) for fk in t.foreign_key_constraints: if fk.referred_table.name == referenced and set(fk.column_key...
Utility to find a constraint name in alembic migrations
def add_default_import(cls, module: str): """Add a gated default import to the default imports. In particular, we need to avoid importing 'basilisp.core' before we have finished macro-expanding.""" if module in cls.GATED_IMPORTS: cls.DEFAULT_IMPORTS.swap(lambda s: s.cons(sym...
Add a gated default import to the default imports. In particular, we need to avoid importing 'basilisp.core' before we have finished macro-expanding.
def blob(self, nodeid, tag, start=0, end=0xFFFFFFFF): """ Blobs are stored in sequential nodes with increasing index values. most blobs, like scripts start at index 0, long names start at a specified offset. """ startkey = self.makekey(nodeid, ...
Blobs are stored in sequential nodes with increasing index values. most blobs, like scripts start at index 0, long names start at a specified offset.
def GetArtifactPathDependencies(rdf_artifact): """Return a set of knowledgebase path dependencies. Args: rdf_artifact: RDF artifact object. Returns: A set of strings for the required kb objects e.g. ["users.appdata", "systemroot"] """ deps = set() for source in rdf_artifact.sources: for ar...
Return a set of knowledgebase path dependencies. Args: rdf_artifact: RDF artifact object. Returns: A set of strings for the required kb objects e.g. ["users.appdata", "systemroot"]
def set_permissions(self, object, replace=False): """ Sets the S3 ACL grants for the given object to the appropriate value based on the type of Distribution. If the Distribution is serving private content the ACL will be set to include the Origin Access Identity associated with ...
Sets the S3 ACL grants for the given object to the appropriate value based on the type of Distribution. If the Distribution is serving private content the ACL will be set to include the Origin Access Identity associated with the Distribution. If the Distribution is serving public conte...
def _initURL(self, org_url=None, token_url=None, referer_url=None): """ sets proper URLs for AGOL """ if org_url is not None and org_url != '': if not org_url.startswith('http://') and not org_url.startswith('https://'): org_url = 'http://' +...
sets proper URLs for AGOL
def count_year(year, **kwargs): ''' Lists occurrence counts by year :param year: [int] year range, e.g., ``1990,2000``. Does not support ranges like ``asterisk,2010`` :return: dict Usage:: from pygbif import occurrences occurrences.count_year(year = '1990,2000') ''' ...
Lists occurrence counts by year :param year: [int] year range, e.g., ``1990,2000``. Does not support ranges like ``asterisk,2010`` :return: dict Usage:: from pygbif import occurrences occurrences.count_year(year = '1990,2000')
def handle_connection_repl(client): """ Handles connection. """ client.settimeout(None) # # disable this till we have evidence that it's needed # client.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 0) # # Note: setting SO_RCVBUF on UDS has no effect, see: http://man7.org/linux/man-pages/m...
Handles connection.
def encode(data, checksum=True):
    """Encode binary data as base58 text using BASE58_ALPHABET.

    :param data: bytes to encode
    :param checksum: when True, append the first 4 bytes of the
        double-SHA256 of ``data`` before encoding (base58check)
    :return: base58-encoded string
    """
    payload = data + utils.hash256(data)[:4] if checksum else data
    value, prefix = to_long(256, lambda x: x, iter(payload))
    encoded = from_long(value, prefix, BASE58_BASE, lambda d: BASE58_ALPHABET[d])
    return encoded.decode("utf8")
Convert binary to base58 using BASE58_ALPHABET.
def enrich_variants(graph: BELGraph, func: Union[None, str, Iterable[str]] = None): """Add the reference nodes for all variants of the given function. :param graph: The target BEL graph to enrich :param func: The function by which the subject of each triple is filtered. Defaults to the set of protein, rna,...
Add the reference nodes for all variants of the given function. :param graph: The target BEL graph to enrich :param func: The function by which the subject of each triple is filtered. Defaults to the set of protein, rna, mirna, and gene.
def json2py(json_obj): """ Converts the inputted JSON object to a python value. :param json_obj | <variant> """ for key, value in json_obj.items(): if type(value) not in (str, unicode): continue # restore a datetime if re.match('^\d{4}-\d{2}-\d{2} \d{2}...
Converts the inputted JSON object to a python value. :param json_obj | <variant>
def parse_class_names(args): """ parse # classes and class_names if applicable """ num_class = args.num_class if len(args.class_names) > 0: if os.path.isfile(args.class_names): # try to open it to read class names with open(args.class_names, 'r') as f: class_n...
parse # classes and class_names if applicable
def get_ips(v6=False): """Returns all available IPs matching to interfaces, using the windows system. Should only be used as a WinPcapy fallback.""" res = {} for iface in six.itervalues(IFACES): ips = [] for ip in iface.ips: if v6 and ":" in ip: ips.append(ip)...
Returns all available IPs matching to interfaces, using the windows system. Should only be used as a WinPcapy fallback.
def traverse_commits(self) -> Generator[Commit, None, None]: """ Analyze all the specified commits (all of them by default), returning a generator of commits. """ if isinstance(self._path_to_repo, str): self._path_to_repo = [self._path_to_repo] for path_repo...
Analyze all the specified commits (all of them by default), returning a generator of commits.
def process_post_tag(self, bulk_mode, api_tag): """ Create or update a Tag related to a post. :param bulk_mode: If True, minimize db operations by bulk creating post objects :param api_tag: the API data for the Tag :return: the Tag object """ tag = None ...
Create or update a Tag related to a post. :param bulk_mode: If True, minimize db operations by bulk creating post objects :param api_tag: the API data for the Tag :return: the Tag object
def plot(x, y, z, ax=None, **kwargs): r""" Plot iso-probability mass function, converted to sigmas. Parameters ---------- x, y, z : numpy arrays Same as arguments to :func:`matplotlib.pyplot.contour` ax: axes object, optional :class:`matplotlib.axes._subplots.AxesSubplot` to pl...
r""" Plot iso-probability mass function, converted to sigmas. Parameters ---------- x, y, z : numpy arrays Same as arguments to :func:`matplotlib.pyplot.contour` ax: axes object, optional :class:`matplotlib.axes._subplots.AxesSubplot` to plot the contours onto. If unsupplie...
def working_directory(self):
    """
    Get the current working directory.

    :rtype: str
    :return: current working directory
    :raise: ValueError if the file system handle is closed
    """
    # Refuse to operate on a closed handle, then delegate to the fs backend.
    _complain_ifclosed(self.closed)
    return self.fs.get_working_directory()
Get the current working directory. :rtype: str :return: current working directory
def findOrLoadRenderModel(self, pchRenderModelName): "Purpose: Finds a render model we've already loaded or loads a new one" pRenderModel = None for model in self.m_vecRenderModels: if model.getName() == pchRenderModelName: pRenderModel = model b...
Purpose: Finds a render model we've already loaded or loads a new one
def set_font_size(self, size):
    """Convenience method for just changing font size.

    :param size: new font size; the update is skipped when it matches
        the current ``self.font.font_size`` (avoids a redundant reset).
    """
    # IDIOM: inverted the original `if equal: pass else: ...` into a
    # direct guard — same behavior, no dead `pass` branch.
    if self.font.font_size != size:
        self.font._set_size(size)
Convenience method for just changing font size.
def login_checking_email(pending_id, ticket, response, detail_url='https://pswdless.appspot.com/rest/detail'): """ Log user in using Passwordless service :param pending_id: PendingExternalToMainUser's id :param ticket: ticket returned from Passwordless :param response: Response object from webapp2 ...
Log user in using Passwordless service :param pending_id: PendingExternalToMainUser's id :param ticket: ticket returned from Passwordless :param response: Response object from webapp2 :param detail_url: url to check ticket and user data :return: a Command that log user in when executed
def _delete_entity(self): """Delete entity from datastore. Attempts to delete using the key_name stored on the object, whether or not the given key is in the datastore. """ if self._is_ndb(): _NDB_KEY(self._model, self._key_name).delete() else: en...
Delete entity from datastore. Attempts to delete using the key_name stored on the object, whether or not the given key is in the datastore.
def _copy_each_include_files_to_include_dir(self): """Copy include header files for each directory to include directory. Copy include header files from rpm/ rpmio/*.h lib/*.h build/*.h sign/*.h to rp...
Copy include header files for each directory to include directory. Copy include header files from rpm/ rpmio/*.h lib/*.h build/*.h sign/*.h to rpm/ include/ rpm/*.h ...
def event_gen( self, timeout_s=None, yield_nones=True, filter_predicate=None, terminal_events=_DEFAULT_TERMINAL_EVENTS): """Yield one event after another. If `timeout_s` is provided, we'll break when no event is received for that many seconds. """ # We will eithe...
Yield one event after another. If `timeout_s` is provided, we'll break when no event is received for that many seconds.
def function(self,p): """Selects and returns one of the patterns in the list.""" pg = p.generator motion_orientation=p.orientation+pi/2.0 new_x = p.x+p.size*pg.x new_y = p.y+p.size*pg.y image_array = pg(xdensity=p.xdensity,ydensity=p.ydensity,bounds=p.bounds, ...
Selects and returns one of the patterns in the list.
def add_event(self, event): """Adds events to the queue. Will ignore events that occur before the settle time for that pin/direction. Such events are assumed to be bouncing. """ # print("Trying to add event:") # print(event) # find out the pin settle time ...
Adds events to the queue. Will ignore events that occur before the settle time for that pin/direction. Such events are assumed to be bouncing.
def _run(self): """ Runs the interval loop. """ def get_next_interval(): start_time = time.time() start = 0 if self.eager else 1 for count in itertools.count(start=start): yield max(start_time + count * self.interval - time.time(), 0) interval...
Runs the interval loop.
def get_bidi_paired_bracket_type_property(value, is_bytes=False): """Get `BPT` property.""" obj = unidata.ascii_bidi_paired_bracket_type if is_bytes else unidata.unicode_bidi_paired_bracket_type if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['bidipairedbr...
Get `BPT` property.
def to_project_config(self, with_packages=False): """Return a dict representation of the config that could be written to disk with `yaml.safe_dump` to get this configuration. :param with_packages bool: If True, include the serialized packages file in the root. :returns dict:...
Return a dict representation of the config that could be written to disk with `yaml.safe_dump` to get this configuration. :param with_packages bool: If True, include the serialized packages file in the root. :returns dict: The serialized profile.
def _collect_paths(element): """ Collect all possible path which leads to `element`. Function returns standard path from root element to this, reverse path, which uses negative indexes for path, also some pattern matches, like "this is element, which has neighbour with id 7" and so on. Args: ...
Collect all possible path which leads to `element`. Function returns standard path from root element to this, reverse path, which uses negative indexes for path, also some pattern matches, like "this is element, which has neighbour with id 7" and so on. Args: element (obj): HTMLElement instanc...
def check_version(version: str): """ Checks given version against code version and determines compatibility. Throws if versions are incompatible. :param version: Given version. """ code_version = parse_version(__version__) given_version = parse_version(version) check_condition(code_vers...
Checks given version against code version and determines compatibility. Throws if versions are incompatible. :param version: Given version.
def find_in_line(line): # type: (str) -> Optional[str] """ Find a version in a line. :param line: :return: """ if not line: return None for method in [find_by_ast, find_version_by_string_lib, find_version_by_regex]: by = method(line) by = validate_string(by) ...
Find a version in a line. :param line: :return:
def add_group(data_api, data_setters, group_index): """Add the data for a whole group. :param data_api the data api from where to get the data :param data_setters the class to push the data to :param group_index the index for this group""" group_type_ind = data_api.group_type_list[group_index] a...
Add the data for a whole group. :param data_api the data api from where to get the data :param data_setters the class to push the data to :param group_index the index for this group
def require_metadata(): "Prevent improper installs without necessary metadata. See #659" egg_info_dir = os.path.join(here, 'setuptools.egg-info') if not os.path.exists(egg_info_dir): msg = ( "Cannot build setuptools without metadata. " "Run `bootstrap.py`." ) ...
Prevent improper installs without necessary metadata. See #659
def revert(self): """Revert file from disk""" index = self.get_stack_index() finfo = self.data[index] filename = finfo.filename if finfo.editor.document().isModified(): self.msgbox = QMessageBox( QMessageBox.Warning, sel...
Revert file from disk
def magic_mprun(self, parameter_s=''): """ Execute a statement under the line-by-line memory profiler from the memory_profiler module. Usage: %mprun -f func1 -f func2 <statement> The given statement (which doesn't require quote marks) is run via the LineProfiler. Profiling is enabled for the...
Execute a statement under the line-by-line memory profiler from the memory_profiler module. Usage: %mprun -f func1 -f func2 <statement> The given statement (which doesn't require quote marks) is run via the LineProfiler. Profiling is enabled for the functions specified by the -f options. The...
def try_read(self, address, size): """Try to read memory content at specified address. If any location was not written before, it returns a tuple (False, None). Otherwise, it returns (True, memory content). """ value = 0x0 for i in range(0, size): addr = ad...
Try to read memory content at specified address. If any location was not written before, it returns a tuple (False, None). Otherwise, it returns (True, memory content).
def release(self, *args, **kwargs): """ Really release the lock only if it's not a sub-lock. Then save the sub-lock status and mark the model as unlocked. """ if not self.field.lockable: return if self.sub_lock_mode: return super(FieldLock,...
Really release the lock only if it's not a sub-lock. Then save the sub-lock status and mark the model as unlocked.
def create(self, company, timezone, country): """Creates a client.""" body = { "CompanyName": company, "TimeZone": timezone, "Country": country} response = self._post("/clients.json", json.dumps(body)) self.client_id = json_to_py(response) ret...
Creates a client.
def generate_private_key(self): """ Generates a private key based on the password. SHA-256 is a member of the SHA-2 cryptographic hash functions designed by the NSA. SHA stands for Secure Hash Algorithm. The password is converted to bytes and hashed with SHA-256. The binary outp...
Generates a private key based on the password. SHA-256 is a member of the SHA-2 cryptographic hash functions designed by the NSA. SHA stands for Secure Hash Algorithm. The password is converted to bytes and hashed with SHA-256. The binary output is converted to a hex representation. ...
def remove_segments(self, segments_to_remove): ''' Remove the faces and vertices for given segments, keeping all others. Args: segments_to_remove: a list of segnments whose vertices will be removed ''' v_ind = self.vertex_indices_in_segments(segments_to_remove) self....
Remove the faces and vertices for given segments, keeping all others. Args: segments_to_remove: a list of segments whose vertices will be removed
def logical_cores(self): """Return the number of cpu cores as reported to the os. May be different from physical_cores if, ie, intel's hyperthreading is enabled. """ try: return self._logical_cores() except Exception as e: from rez.utils.logging_ ...
Return the number of cpu cores as reported to the os. May be different from physical_cores if, ie, intel's hyperthreading is enabled.
def _get_rename_function(mapper): """ Returns a function that will map names/labels, dependent if mapper is a dict, Series or just a function. """ if isinstance(mapper, (abc.Mapping, ABCSeries)): def f(x): if x in mapper: return mapper[x] else: ...
Returns a function that will map names/labels, dependent if mapper is a dict, Series or just a function.
def get_block_info(self, block): """ Args: block: block number (eg: 223212) block hash (eg: 0000000000000000210b10d620600dc1cc2380bb58eb2408f9767eb792ed31fa) word "last" - this will always return the latest block word "first" - this will always...
Args: block: block number (eg: 223212) block hash (eg: 0000000000000000210b10d620600dc1cc2380bb58eb2408f9767eb792ed31fa) word "last" - this will always return the latest block word "first" - this will always return the first block Returns: ...
def get_col_sep(self):
    """Return the column separator selected in the dialog.

    Tab button checked -> tab character; whitespace button checked ->
    None; otherwise the text typed in the separator line edit.
    """
    if self.tab_btn.isChecked():
        return u"\t"
    if self.ws_btn.isChecked():
        return None
    return to_text_string(self.line_edt.text())
Return the column separator
def set_value(self, value):
    """Set value of the checkbox.

    Parameters
    ----------
    value : bool
        True checks the box, False unchecks it.
    """
    # Map the boolean onto the Qt check-state enum in one expression.
    state = Qt.Checked if value else Qt.Unchecked
    self.setCheckState(state)
Set value of the checkbox. Parameters ---------- value : bool value for the checkbox
def parse_DID(did, name_type=None): """ Given a DID string, parse it into {'address': ..., 'index': ..., 'name_type'} Raise on invalid DID """ did_pattern = '^did:stack:v0:({}{{25,35}})-([0-9]+)$'.format(OP_BASE58CHECK_CLASS) m = re.match(did_pattern, did) assert m, 'Invalid DID: {}'.format...
Given a DID string, parse it into {'address': ..., 'index': ..., 'name_type'} Raise on invalid DID
def _handle_template(self, token): """Handle a case where a template is at the head of the tokens.""" params = [] default = 1 self._push() while self._tokens: token = self._tokens.pop() if isinstance(token, tokens.TemplateParamSeparator): i...
Handle a case where a template is at the head of the tokens.
def setNetworkName(self, networkName='GRL'): """set Thread Network name Args: networkName: the networkname string to be set Returns: True: successful to set the Thread Networkname False: fail to set the Thread Networkname """ print '%s call s...
set Thread Network name Args: networkName: the networkname string to be set Returns: True: successful to set the Thread Networkname False: fail to set the Thread Networkname
def setAttributeNS(self, namespaceURI, localName, value): ''' Keyword arguments: namespaceURI -- namespace of attribute to create, None is for attributes in no namespace. localName -- local name of new attribute value -- value of new attribute ...
Keyword arguments: namespaceURI -- namespace of attribute to create, None is for attributes in no namespace. localName -- local name of new attribute value -- value of new attribute
def get_ecommerce_client(url_postfix='', site_code=None): """ Get client for fetching data from ecommerce API. Arguments: site_code (str): (Optional) The SITE_OVERRIDES key to inspect for site-specific values url_postfix (str): (Optional) The URL postfix value to append to the ECOMMERCE_API_...
Get client for fetching data from ecommerce API. Arguments: site_code (str): (Optional) The SITE_OVERRIDES key to inspect for site-specific values url_postfix (str): (Optional) The URL postfix value to append to the ECOMMERCE_API_ROOT value. Returns: EdxRestApiClient object
def namespace_splitter(self, value): """ Setter for **self.__namespace_splitter** attribute. :param value: Attribute value. :type value: unicode """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format( ...
Setter for **self.__namespace_splitter** attribute. :param value: Attribute value. :type value: unicode
def capture(returns, factor_returns, period=DAILY): """ Compute capture ratio. Parameters ---------- returns : pd.Series or np.ndarray Returns of the strategy, noncumulative. - See full explanation in :func:`~empyrical.stats.cum_returns`. factor_returns : pd.Series or np.ndarray...
Compute capture ratio. Parameters ---------- returns : pd.Series or np.ndarray Returns of the strategy, noncumulative. - See full explanation in :func:`~empyrical.stats.cum_returns`. factor_returns : pd.Series or np.ndarray Noncumulative returns of the factor to which beta is ...
def _add_thread(self, aThread): """ Private method to add a thread object to the snapshot. @type aThread: L{Thread} @param aThread: Thread object. """ ## if not isinstance(aThread, Thread): ## if hasattr(aThread, '__class__'): ## typename = aThr...
Private method to add a thread object to the snapshot. @type aThread: L{Thread} @param aThread: Thread object.
def backprop(self, input_data, targets, cache=None): """ Backpropagate through the logistic layer. **Parameters:** input_data : ``GPUArray`` Inpute data to compute activations for. targets : ``GPUArray`` The target values of the units. ...
Backpropagate through the logistic layer. **Parameters:** input_data : ``GPUArray`` Input data to compute activations for. targets : ``GPUArray`` The target values of the units. cache : list of ``GPUArray`` Cache obtained from forward pass. If the...
def set_edge_label(self, edge, label): """ Set the label of an edge. @type edge: edge @param edge: One edge. @type label: string @param label: Edge label. """ self.set_edge_properties(edge, label=label ) if not self.DIRECTED: self.s...
Set the label of an edge. @type edge: edge @param edge: One edge. @type label: string @param label: Edge label.
def shell(self): """ Opens a Django focussed Python shell. Essentially the equivalent of running `manage.py shell`. """ r = self.local_renderer if '@' in self.genv.host_string: r.env.shell_host_string = self.genv.host_string else: r.env.she...
Opens a Django focussed Python shell. Essentially the equivalent of running `manage.py shell`.
def extract_tar(url, target_dir, additional_compression="", remove_common_prefix=False, overwrite=False): """ extract a targz and install to the target directory """ try: if not os.path.exists(target_dir): os.makedirs(target_dir) tf = tarfile.TarFile.open(fileobj=download_to_bytesio(...
extract a targz and install to the target directory
def find_bind_module(name, verbose=False): """Find the bind module matching the given name. Args: name (str): Name of package to find bind module for. verbose (bool): If True, print extra output. Returns: str: Filepath to bind module .py file, or None if not found. """ bind...
Find the bind module matching the given name. Args: name (str): Name of package to find bind module for. verbose (bool): If True, print extra output. Returns: str: Filepath to bind module .py file, or None if not found.
def train(cls, new_data, old=None): """ Train a continuous scale Parameters ---------- new_data : array_like New values old : array_like Old range. Most likely a tuple of length 2. Returns ------- out : tuple L...
Train a continuous scale Parameters ---------- new_data : array_like New values old : array_like Old range. Most likely a tuple of length 2. Returns ------- out : tuple Limits(range) of the scale
def setPotential(self, columnIndex, potential): """ Sets the potential mapping for a given column. ``potential`` size must match the number of inputs, and must be greater than ``stimulusThreshold``. :param columnIndex: (int) column index to set potential for. :param potential: (list) value to ...
Sets the potential mapping for a given column. ``potential`` size must match the number of inputs, and must be greater than ``stimulusThreshold``. :param columnIndex: (int) column index to set potential for. :param potential: (list) value to set.
def sum(cls, iresults): """ Sum the data transfer information of a set of results """ res = object.__new__(cls) res.received = [] res.sent = 0 for iresult in iresults: res.received.extend(iresult.received) res.sent += iresult.sent ...
Sum the data transfer information of a set of results
def clean_all(self): """ *clean and sync all the bookkeeping tables* **Return:** - ``bookkeeper`` **Usage:** .. code-block:: python from rockAtlas.bookkeeping import bookkeeper bk = bookkeeper( log=log, ...
*clean and sync all the bookkeeping tables* **Return:** - ``bookkeeper`` **Usage:** .. code-block:: python from rockAtlas.bookkeeping import bookkeeper bk = bookkeeper( log=log, settings=settings, ...
def _debug_off():
    """ turns off debugging by removing hidden tmp file """
    # Remove the on-disk debug flag if it exists (no-op when absent).
    if _os.path.exists(__debugflag__):
        _os.remove(__debugflag__)
    # NOTE(review): this binds a *local* __loglevel__ — it does not rebind
    # a module-level __loglevel__ if one exists. Presumably intentional
    # since the value is only passed to _set_debug_dict below, but confirm
    # whether the module-level level should also be reset.
    __loglevel__ = "ERROR"
    _LOGGER.info("debugging turned off")
    # Propagate the new level to the shared debug configuration.
    _set_debug_dict(__loglevel__)
turns off debugging by removing hidden tmp file
def federation_payment(self, fed_address, amount, asset_code='XLM', asset_issuer=None, source=None, allow_http=False): """Append a :class:`Payment <st...
Append a :class:`Payment <stellar_base.operation.Payment>` operation to the list of operations using federation on the destination address. Translates the destination stellar address to an account ID via :func:`federation <stellar_base.federation.federation>`, before creating a new paym...
def getGraphFieldList(self, graph_name): """Returns list of names of fields for graph with name graph_name. @param graph_name: Graph Name @return: List of field names for graph. """ graph = self._getGraph(graph_name, True) return graph.getField...
Returns list of names of fields for graph with name graph_name. @param graph_name: Graph Name @return: List of field names for graph.
def create_for_object_attributes(item_type, faulty_attribute_name: str, hint): """ Helper method for constructor attributes :param item_type: :return: """ # this leads to infinite loops # try: # prt_type = get_pretty_type_str(item_type) # exce...
Helper method for constructor attributes :param item_type: :return:
def extractPrintSaveIntermittens(): """ This function will print out the intermittents onto the screen for casual viewing. It will also print out where the giant summary dictionary is going to be stored. :return: None """ # extract intermittents from collected failed tests global g_summary...
This function will print out the intermittents onto the screen for casual viewing. It will also print out where the giant summary dictionary is going to be stored. :return: None
def set_window_override_redirect(self, window, override_redirect): """ Set the override_redirect value for a window. This generally means whether or not a window manager will manage this window. If you set it to 1, the window manager will usually not draw borders on the window, ...
Set the override_redirect value for a window. This generally means whether or not a window manager will manage this window. If you set it to 1, the window manager will usually not draw borders on the window, etc. If you set it to 0, the window manager will see it like a normal applicat...
def setAutoRaise(self, state): """ Sets whether or not this combo box should automatically raise up. :param state | <bool> """ self._autoRaise = state self.setMouseTracking(state) try: self.lineEdit().setVisible(not state) ...
Sets whether or not this combo box should automatically raise up. :param state | <bool>
def dfs(args=None, properties=None, hadoop_conf_dir=None): """ Run the Hadoop file system shell. All arguments are passed to :func:`run_class`. """ # run FsShell directly (avoids "hadoop dfs" deprecation) return run_class( "org.apache.hadoop.fs.FsShell", args, properties, hadoop...
Run the Hadoop file system shell. All arguments are passed to :func:`run_class`.
def make_compare(key, value, obj): "Map a key name to a specific comparison function" if '__' not in key: # If no __ exists, default to doing an "exact" comparison key, comp = key, 'exact' else: key, comp = key.rsplit('__', 1) # Check if comp is valid if hasattr(Compare, comp...
Map a key name to a specific comparison function
def generate(self, *args, **kwargs): """For very large templates it can be useful to not render the whole template at once but evaluate each statement after another and yield piece for piece. This method basically does exactly that and returns a generator that yields one item after anot...
For very large templates it can be useful to not render the whole template at once but evaluate each statement after another and yield piece for piece. This method basically does exactly that and returns a generator that yields one item after another as unicode strings. It accepts the ...
def _create_id(self): """Returns a freshly created DB-wide unique ID.""" cursor = self._db.cursor() cursor.execute('INSERT INTO Ids DEFAULT VALUES') return cursor.lastrowid
Returns a freshly created DB-wide unique ID.
def xpathRegisterVariable(self, name, ns_uri, value):
    """Register ``value`` as the XPath variable ``name`` in this context.

    ``ns_uri`` may scope the variable to a namespace. Returns whatever
    the underlying libxml2 call returns.
    """
    return libxml2mod.xmlXPathRegisterVariable(self._o, name, ns_uri, value)
Register a variable with the XPath context
def format_number(number): """ >>> format_number(1) 1 >>> format_number(22) 22 >>> format_number(333) 333 >>> format_number(4444) '4,444' >>> format_number(55555) '55,555' >>> format_number(666666) '666,666' >>> format_number(7777777) '7,777,777' """ c...
>>> format_number(1) 1 >>> format_number(22) 22 >>> format_number(333) 333 >>> format_number(4444) '4,444' >>> format_number(55555) '55,555' >>> format_number(666666) '666,666' >>> format_number(7777777) '7,777,777'
def image_mime_type(data): """Return the MIME type of the image data (a bytestring). """ # This checks for a jpeg file with only the magic bytes (unrecognized by # imghdr.what). imghdr.what returns none for that type of file, so # _wider_test_jpeg is run in that case. It still returns None if it did...
Return the MIME type of the image data (a bytestring).
def _clean_intenum(obj): """Remove all IntEnum classes from a map.""" if isinstance(obj, dict): for key, value in obj.items(): if isinstance(value, IntEnum): obj[key] = value.value elif isinstance(value, (dict, list)): obj[key] = _clean_intenum(va...
Remove all IntEnum classes from a map.
def delete_record_set(self, record_set): """Append a record set to the 'deletions' for the change set. :type record_set: :class:`google.cloud.dns.resource_record_set.ResourceRecordSet` :param record_set: the record set to append. :raises: ``ValueError`` if ``record_set`` is...
Append a record set to the 'deletions' for the change set. :type record_set: :class:`google.cloud.dns.resource_record_set.ResourceRecordSet` :param record_set: the record set to append. :raises: ``ValueError`` if ``record_set`` is not of the required type.
def read(self):
    """Return the full text contents of the file at ``self.path``."""
    with open(self.path) as handle:
        return handle.read()
Read and return the contents of the file.
def get_ref(self):
    """Return a ``DBRef`` for this object, or ``None`` if it has no id."""
    # Read ``self.id`` once in case it is a computed property.
    doc_id = self.id
    return None if doc_id is None else DBRef(self.collection, doc_id)
Returns a `DBRef` for this object or ``None``.
def set_object_metadata(self, container, obj, metadata, clear=False, extra_info=None, prefix=None): """ Accepts a dictionary of metadata key/value pairs and updates the specified object metadata with them. If 'clear' is True, any existing metadata is deleted and only the ...
Accepts a dictionary of metadata key/value pairs and updates the specified object metadata with them. If 'clear' is True, any existing metadata is deleted and only the passed metadata is retained. Otherwise, the values passed here update the object's metadata. 'extra_info; is a...
def save_object(self, obj): """ Save object to disk as JSON. Generally shouldn't be called directly. """ obj.pre_save(self.jurisdiction.jurisdiction_id) filename = '{0}_{1}.json'.format(obj._type, obj._id).replace('/', '-') self.info('save %s %s as %s',...
Save object to disk as JSON. Generally shouldn't be called directly.
def ocsp_urls(self): """ :return: A list of zero or more unicode strings of the OCSP URLs for this cert """ if not self.authority_information_access_value: return [] output = [] for entry in self.authority_information_access_value: ...
:return: A list of zero or more unicode strings of the OCSP URLs for this cert
def translate_labels(val): ''' Can either be a list of label names, or a list of name=value pairs. The API can accept either a list of label names or a dictionary mapping names to values, so the value we translate will be different depending on the input. ''' if not isinstance(val, dict): ...
Can either be a list of label names, or a list of name=value pairs. The API can accept either a list of label names or a dictionary mapping names to values, so the value we translate will be different depending on the input.
def contains_field_list(self, path, name): """ Returns True if a multi-valued field exists at the specified path, otherwise False. :param path: str or Path instance :param name: :type name: str :return: :raises ValueError: A component of path is a field name. ...
Returns True if a multi-valued field exists at the specified path, otherwise False. :param path: str or Path instance :param name: :type name: str :return: :raises ValueError: A component of path is a field name. :raises TypeError: The field name is a component of a path...
def is_user_id_available(self, user_id, note=None, loglevel=logging.DEBUG): """Determine whether the specified user_id available. @param user_id: User id to be checked. @param note: See send() @type user_id: integer @rtype...
Determine whether the specified user_id available. @param user_id: User id to be checked. @param note: See send() @type user_id: integer @rtype: boolean @return: True is the specified user id is not used yet, False if it's already been assigned to a user.