code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def put(self, endpoint, data, **kwargs): """ PUT requests """ return self.__request("PUT", endpoint, data, **kwargs)
PUT requests
def get_sigla(self, work): """Returns a list of all of the sigla for `work`. :param work: name of work :type work: `str` :rtype: `list` of `str` """ return [os.path.splitext(os.path.basename(path))[0] for path in glob.glob(os.path.join(self._path, work, ...
Returns a list of all of the sigla for `work`. :param work: name of work :type work: `str` :rtype: `list` of `str`
def run(self): """ Run the job and immediately reschedule it. :return: The return value returned by the `job_func` """ logger.info('Running job %s', self) ret = self.job_func() self.last_run = datetime.datetime.now() self._schedule_next_run() retu...
Run the job and immediately reschedule it. :return: The return value returned by the `job_func`
def multipart_complete(self, multipart): """Complete a multipart upload. :param multipart: A :class:`invenio_files_rest.models.MultipartObject` instance. :returns: A Flask response. """ multipart.complete() db.session.commit() version_id = str(uuid.u...
Complete a multipart upload. :param multipart: A :class:`invenio_files_rest.models.MultipartObject` instance. :returns: A Flask response.
def set(self, handler, attr, name, path, cfg): """ Obtain value for config variable, by prompting the user for input and substituting a default value if needed. Also does validation on user input """ full_name = ("%s.%s" % (path, name)).strip(".") # obtain def...
Obtain value for config variable, by prompting the user for input and substituting a default value if needed. Also does validation on user input
def _copy(self, filename, dir1, dir2): """ Private function for copying a file """ # NOTE: dir1 is source & dir2 is target if self._copyfiles: rel_path = filename.replace('\\', '/').split('/') rel_dir = '/'.join(rel_path[:-1]) filename = rel_path[-1] ...
Private function for copying a file
async def process_request(self, path, headers): """ This hook is called to determine if the websocket should return an HTTP response and close. Our behavior here is to start the ASGI application, and then wait for either `accept` or `close` in order to determine if we should ...
This hook is called to determine if the websocket should return an HTTP response and close. Our behavior here is to start the ASGI application, and then wait for either `accept` or `close` in order to determine if we should close the connection.
def backtrace_on_usr1 (): """Install a signal handler such that this program prints a Python traceback upon receipt of SIGUSR1. This could be useful for checking that long-running programs are behaving properly, or for discovering where an infinite loop is occurring. Note, however, that the Python ...
Install a signal handler such that this program prints a Python traceback upon receipt of SIGUSR1. This could be useful for checking that long-running programs are behaving properly, or for discovering where an infinite loop is occurring. Note, however, that the Python interpreter does not invoke Pytho...
def _freq_parser(self, freq): """ day, hour, min, sec, """ freq = freq.lower().strip() try: if "day" in freq: freq = freq.replace("day", "") return timedelta(days=int(freq)) elif "hour" in freq: freq = freq.r...
day, hour, min, sec,
def scrap(self, url=None, scheme=None, timeout=None, html_parser=None, cache_ext=None ): """ Scrap a url and parse the content according to scheme :param url: Url to parse (default: self._url) :type url: str :param scheme: Scheme to apply to html ...
Scrap a url and parse the content according to scheme :param url: Url to parse (default: self._url) :type url: str :param scheme: Scheme to apply to html (default: self._scheme) :type scheme: dict :param timeout: Timeout for http operation (default: self._timout) :type t...
def untokenized_tfds_dataset(dataset_name=gin.REQUIRED, text2self=gin.REQUIRED, tfds_data_dir=gin.REQUIRED, dataset_split=gin.REQUIRED, batch_size=gin.REQUIRED, sequence_lengt...
Reads a tensorflow_datasets dataset. Returns a tf.data.Dataset containing single tokenized examples where each feature ends in EOS=1. Args: dataset_name: a string text2self: a boolean tfds_data_dir: a boolean dataset_split: a string batch_size: an integer sequence_length: an integer ...
def heterogzygote_counts(paired): """Provide tumor/normal counts at population heterozyogte sites with CollectAllelicCounts. """ work_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(paired.tumor_data), "structural", "counts")) key = "germline_het_pon" het_bed = tz.get_in(["genome_resources", "...
Provide tumor/normal counts at population heterozyogte sites with CollectAllelicCounts.
def Ra(L: float, Ts: float, Tf: float, alpha: float, beta: float, nu: float ) -> float: """ Calculate the Ralleigh number. :param L: [m] heat transfer surface characteristic length. :param Ts: [K] heat transfer surface temperature. :param Tf: [K] bulk fluid temperature. :param alpha: [m2...
Calculate the Ralleigh number. :param L: [m] heat transfer surface characteristic length. :param Ts: [K] heat transfer surface temperature. :param Tf: [K] bulk fluid temperature. :param alpha: [m2/s] fluid thermal diffusivity. :param beta: [1/K] fluid coefficient of thermal expansion. :param nu...
def create_secgroups(self): """Create security groups as defined in the configs.""" utils.banner("Creating Security Group") sgobj = securitygroup.SpinnakerSecurityGroup( app=self.app, env=self.env, region=self.region, prop_path=self.json_path) sgobj.create_security_group()
Create security groups as defined in the configs.
def list_targets_by_rule(client=None, **kwargs): """ Rule='string' """ result = client.list_targets_by_rule(**kwargs) if not result.get("Targets"): result.update({"Targets": []}) return result
Rule='string'
def alwaysCalledWith(self, *args, **kwargs): #pylint: disable=invalid-name """ Determining whether args/kwargs are the ONLY args/kwargs called previously Eg. f(1, 2, 3) f(1, 2, 3) spy.alwaysCalledWith(1, 2) will return True, because they are the ONLY called ar...
Determining whether args/kwargs are the ONLY args/kwargs called previously Eg. f(1, 2, 3) f(1, 2, 3) spy.alwaysCalledWith(1, 2) will return True, because they are the ONLY called args f(1, 3) spy.alwaysCalledWith(1) will return True, because 1 is the O...
def get_metric_index(self, metric_cls): """ Get the index name with the data for a metric class :param metric_cls: a metric class :return: the name of the index with the data for the metric """ ds = self.class2ds[metric_cls.ds] if self.index_dict[ds]: ...
Get the index name with the data for a metric class :param metric_cls: a metric class :return: the name of the index with the data for the metric
def find_recent(self, nrecent=4): '''Find recent non-trashed notes''' try: rows = self.cur.execute("SELECT noteId FROM note WHERE book > 0 ORDER BY date DESC LIMIT %d;"%nrecent).fetchall() except: self.error("nota.find_recent() cannot look up note list") # Possibl...
Find recent non-trashed notes
def as_spectrum(self, binned=True): """Reduce the observation to a simple spectrum object. An observation is a complex object with some restrictions on its capabilities. At times, it would be useful to work with the simulated observation as a simple object that is easier to mani...
Reduce the observation to a simple spectrum object. An observation is a complex object with some restrictions on its capabilities. At times, it would be useful to work with the simulated observation as a simple object that is easier to manipulate and takes up less memory. Param...
def get_relations(self, cursor, table_name): """ Returns a dictionary of {field_index: (field_index_other_table, other_table)} representing all relationships to the given table. Indexes are 0-based. """ # pylint:disable=global-statement,too-many-locals,too-many-nested-blocks,unus...
Returns a dictionary of {field_index: (field_index_other_table, other_table)} representing all relationships to the given table. Indexes are 0-based.
def allclose_up_to_global_phase( a: np.ndarray, b: np.ndarray, *, rtol: float = 1.e-5, atol: float = 1.e-8, equal_nan: bool = False ) -> bool: """Determines if a ~= b * exp(i t) for some t. Args: a: A numpy array. b: Another numpy array. r...
Determines if a ~= b * exp(i t) for some t. Args: a: A numpy array. b: Another numpy array. rtol: Relative error tolerance. atol: Absolute error tolerance. equal_nan: Whether or not NaN entries should be considered equal to other NaN entries.
def exists_type(self, using=None, **kwargs): """ Check if a type/types exists in the index. Any additional keyword arguments will be passed to ``Elasticsearch.indices.exists_type`` unchanged. """ return self._get_connection(using).indices.exists_type(index=self._name, **...
Check if a type/types exists in the index. Any additional keyword arguments will be passed to ``Elasticsearch.indices.exists_type`` unchanged.
def count(args): """ count occurences in a list of lists >>> count([['a','b'],['a']]) defaultdict(int, {'a' : 2, 'b' : 1}) """ counts = defaultdict(int) for arg in args: for item in arg: counts[item] = counts[item] + 1 return counts
count occurences in a list of lists >>> count([['a','b'],['a']]) defaultdict(int, {'a' : 2, 'b' : 1})
def register_rpc(self, address, rpc_id, func): """Register a single RPC handler with the given info. This function can be used to directly register individual RPCs, rather than delegating all RPCs at a given address to a virtual Tile. If calls to this function are mixed with ca...
Register a single RPC handler with the given info. This function can be used to directly register individual RPCs, rather than delegating all RPCs at a given address to a virtual Tile. If calls to this function are mixed with calls to add_tile for the same address, these RPCs w...
def goto(directory, create=False): """Context object for changing directory. Args: directory (str): Directory to go to. create (bool): Create directory if it doesn't exists. Usage:: >>> with goto(directory) as ok: ... if not ok: ... print 'Error' ...
Context object for changing directory. Args: directory (str): Directory to go to. create (bool): Create directory if it doesn't exists. Usage:: >>> with goto(directory) as ok: ... if not ok: ... print 'Error' ... else: ... print ...
def do_execute(self): """ The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str """ result = None cont = self.input.payload serialization.write_all( str(self.resolve_option("output")), ...
The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str
def localize_sql(self, sql: str) -> str: """Translates ?-placeholder SQL to appropriate dialect. For example, MySQLdb uses %s rather than ?. """ # pyodbc seems happy with ? now (pyodbc.paramstyle is 'qmark'); # using ? is much simpler, because we may want to use % with LIKE ...
Translates ?-placeholder SQL to appropriate dialect. For example, MySQLdb uses %s rather than ?.
async def register(self, check, *, token=None): """Registers a new local check Parameters: check (Object): Check definition token (ObjectID): Token ID Returns: bool: ``True`` on success The register endpoint is used to add a new check to the local ag...
Registers a new local check Parameters: check (Object): Check definition token (ObjectID): Token ID Returns: bool: ``True`` on success The register endpoint is used to add a new check to the local agent. Checks may be of script, HTTP, TCP, or TTL typ...
def preprocess_mnist(sc, options): """ Preprocess mnist dataset. Normalize and transform into Sample of RDDs. """ train_data = get_mnist(sc, "train", options.dataPath)\ .map(lambda rec_tuple: (normalizer(rec_tuple[0], mnist.TRAIN_MEAN, mnist.TRAIN_STD), rec_tu...
Preprocess mnist dataset. Normalize and transform into Sample of RDDs.
def push_group_with_content(self, content): """Temporarily redirects drawing to an intermediate surface known as a group. The redirection lasts until the group is completed by a call to :meth:`pop_group` or :meth:`pop_group_to_source`. These calls provide the result of any drawin...
Temporarily redirects drawing to an intermediate surface known as a group. The redirection lasts until the group is completed by a call to :meth:`pop_group` or :meth:`pop_group_to_source`. These calls provide the result of any drawing to the group as a pattern, (either as...
def is_authoring_node(self, node): """ Returns if given Node is an authoring node. :param node: Node. :type node: ProjectNode or DirectoryNode or FileNode :return: Is authoring node. :rtype: bool """ for parent_node in foundations.walkers.nodes_walker(no...
Returns if given Node is an authoring node. :param node: Node. :type node: ProjectNode or DirectoryNode or FileNode :return: Is authoring node. :rtype: bool
def on_connect_button__clicked(self, event): ''' Connect to Zero MQ plugin hub (`zmq_plugin.hub.Hub`) using the settings from the text entry fields (e.g., hub URI, plugin name). Emit `plugin-connected` signal with the new plugin instance after hub connection has been established...
Connect to Zero MQ plugin hub (`zmq_plugin.hub.Hub`) using the settings from the text entry fields (e.g., hub URI, plugin name). Emit `plugin-connected` signal with the new plugin instance after hub connection has been established.
def source_channels(self): """ Returns a set describing the source channels on which the gate is defined. """ source_channels = [v.coordinates.keys() for v in self.verts] return set(itertools.chain(*source_channels))
Returns a set describing the source channels on which the gate is defined.
def read(path, encoding="utf-8"): """Read the content of the file. Args: path (str): Path to the file encoding (str): File encoding. Default: utf-8 Returns: str: File content or empty string if there was an error """ try: with io.open(path, encoding=encoding) as f: ...
Read the content of the file. Args: path (str): Path to the file encoding (str): File encoding. Default: utf-8 Returns: str: File content or empty string if there was an error
def extract_wavs(utterances: List[Utterance], tgt_dir: Path, lazy: bool) -> None: """ Extracts WAVs from the media files associated with a list of Utterance objects and stores it in a target directory. Args: utterances: A list of Utterance objects, which include information ...
Extracts WAVs from the media files associated with a list of Utterance objects and stores it in a target directory. Args: utterances: A list of Utterance objects, which include information about the source media file, and the offset of the utterance in the media_file. tg...
def annotate_with_depth(in_file, items): """Annotate called VCF file with depth using duphold (https://github.com/brentp/duphold) Currently annotates single sample and tumor samples in somatic analysis. """ bam_file = None if len(items) == 1: bam_file = dd.get_align_bam(items[0]) else: ...
Annotate called VCF file with depth using duphold (https://github.com/brentp/duphold) Currently annotates single sample and tumor samples in somatic analysis.
def render_template(self, template_file, target_file, template_vars = {}): """ Render a Jinja2 template for the backend The template file is expected in the directory templates/BACKEND_NAME. """ template_dir = str(self.__class__.__name__).lower() template = self.jinja_en...
Render a Jinja2 template for the backend The template file is expected in the directory templates/BACKEND_NAME.
def _wrap_attr(self, attrs, context=None): """wrap bound methods of attrs in a InstanceMethod proxies""" for attr in attrs: if isinstance(attr, UnboundMethod): if _is_property(attr): yield from attr.infer_call_result(self, context) else: ...
wrap bound methods of attrs in a InstanceMethod proxies
def colname_gen(df,col_name = 'unnamed_col'): """ Returns a column name that isn't in the specified DataFrame Parameters: df - DataFrame DataFrame to analyze col_name - string, default 'unnamed_col' Column name to use as the base value for the generated column name """ if col_nam...
Returns a column name that isn't in the specified DataFrame Parameters: df - DataFrame DataFrame to analyze col_name - string, default 'unnamed_col' Column name to use as the base value for the generated column name
def MapFields(function, key=True): """ Transformation factory that maps `function` on the values of a row. It can be applied either to 1. all columns (`key=True`), 2. no column (`key=False`), or 3. a subset of columns by passing a callable, which takes column name and returns `bool` (same a...
Transformation factory that maps `function` on the values of a row. It can be applied either to 1. all columns (`key=True`), 2. no column (`key=False`), or 3. a subset of columns by passing a callable, which takes column name and returns `bool` (same as the parameter `function` in `filter`). :...
def filter_expr(cls_or_alias, **filters): """ forms expressions like [Product.age_from = 5, Product.subject_ids.in_([1,2])] from filters like {'age_from': 5, 'subject_ids__in': [1,2]} Example 1: db.query(Product).filter( *Produ...
forms expressions like [Product.age_from = 5, Product.subject_ids.in_([1,2])] from filters like {'age_from': 5, 'subject_ids__in': [1,2]} Example 1: db.query(Product).filter( *Product.filter_expr(age_from = 5, subject_ids__in=[1, 2])) ...
def max_rigid_id(self): """Returns the maximum rigid body ID contained in the Compound. This is usually used by compound.root to determine the maximum rigid_id in the containment hierarchy. Returns ------- int or None The maximum rigid body ID contained in t...
Returns the maximum rigid body ID contained in the Compound. This is usually used by compound.root to determine the maximum rigid_id in the containment hierarchy. Returns ------- int or None The maximum rigid body ID contained in the Compound. If no rigi...
def unsubscribe_all(self): """Unsubscribes all channels""" for channel in self.list_all(): channel.ensure_stopped() self.connect_api.stop_notifications()
Unsubscribes all channels
def get_translation_args(self, args): """ Returns linguist args from model args. """ translation_args = [] for arg in args: condition = self._get_linguist_condition(arg, transform=True) if condition: translation_args.append(condition) ...
Returns linguist args from model args.
def light(self): """ Returns if object is augmenting or diminishing its light. """ sun = self.chart.getObject(const.SUN) return light(self.obj, sun)
Returns if object is augmenting or diminishing its light.
def install_signal_trap(signums = (signal.SIGTERM, signal.SIGTSTP), retval = 1): """ Installs a signal handler to erase temporary scratch files when a signal is received. This can be used to help ensure scratch files are erased when jobs are evicted by Condor. signums is a squence of the signals to trap, the def...
Installs a signal handler to erase temporary scratch files when a signal is received. This can be used to help ensure scratch files are erased when jobs are evicted by Condor. signums is a squence of the signals to trap, the default value is a list of the signals used by Condor to kill and/or evict jobs. The lo...
def edit(self, image_id, name=None, note=None, tag=None): """Edit image related details. :param int image_id: The ID of the image :param string name: Name of the Image. :param string note: Note of the image. :param string tag: Tags of the image to be updated to. """ ...
Edit image related details. :param int image_id: The ID of the image :param string name: Name of the Image. :param string note: Note of the image. :param string tag: Tags of the image to be updated to.
def _expected_condition_find_element(self, element): """Tries to find the element, but does not thrown an exception if the element is not found :param element: PageElement or element locator as a tuple (locator_type, locator_value) to be found :returns: the web element if it has been found or F...
Tries to find the element, but does not thrown an exception if the element is not found :param element: PageElement or element locator as a tuple (locator_type, locator_value) to be found :returns: the web element if it has been found or False :rtype: selenium.webdriver.remote.webelement.WebEle...
def delete_map(self, url, map=None, auth_map=None): """Gera um XML a partir dos dados do dicionário e o envia através de uma requisição DELETE. :param url: URL para enviar a requisição HTTP. :param map: Dicionário com os dados do corpo da requisição HTTP. :param auth_map: Dicionário com...
Gera um XML a partir dos dados do dicionário e o envia através de uma requisição DELETE. :param url: URL para enviar a requisição HTTP. :param map: Dicionário com os dados do corpo da requisição HTTP. :param auth_map: Dicionário com as informações para autenticação na networkAPI. :retu...
def visit_importfrom(self, node): """Checks to see if a module uses a non-Python logging module.""" try: logging_name = self._from_imports[node.modname] for module, as_name in node.names: if module == logging_name: self._logging_names.add(as_na...
Checks to see if a module uses a non-Python logging module.
def camera_status_encode(self, time_usec, target_system, cam_idx, img_idx, event_id, p1, p2, p3, p4): ''' Camera Event time_usec : Image timestamp (microseconds since UNIX epoch, according to camera clock) (uint64_t) target_system ...
Camera Event time_usec : Image timestamp (microseconds since UNIX epoch, according to camera clock) (uint64_t) target_system : System ID (uint8_t) cam_idx : Camera ID (uint8_t) img_idx : Imag...
def launch_game( players: List[Player], launch_params: Dict[str, Any], show_all: bool, read_overwrite: bool, wait_callback: Callable ) -> None: """ :raises DockerException, ContainerException, RealtimeOutedException """ if not players: raise GameException(...
:raises DockerException, ContainerException, RealtimeOutedException
def append_qs(url, query_string): """Append query_string values to an existing URL and return it as a string. query_string can be: * an encoded string: 'test3=val1&test3=val2' * a dict of strings: {'test3': 'val'} * a dict of lists of strings: {'test3': ['val1', 'val2']} * a lis...
Append query_string values to an existing URL and return it as a string. query_string can be: * an encoded string: 'test3=val1&test3=val2' * a dict of strings: {'test3': 'val'} * a dict of lists of strings: {'test3': ['val1', 'val2']} * a list of tuples: [('test3', 'val1'), ('test3'...
def setProperty(self, name, value): ''' Called by the engine to set a driver property value. @param name: Name of the property @type name: str @param value: Property value @type value: object ''' self._push(self._driver.setProperty, (name, value))
Called by the engine to set a driver property value. @param name: Name of the property @type name: str @param value: Property value @type value: object
def sinter(self, keys, *args): """Emulate sinter.""" func = lambda left, right: left.intersection(right) return self._apply_to_sets(func, "SINTER", keys, *args)
Emulate sinter.
def taskfileinfo_task_data(tfi, role): """Return the data for task :param tfi: the :class:`jukeboxcore.filesys.TaskFileInfo` holds the data :type tfi: :class:`jukeboxcore.filesys.TaskFileInfo` :param role: item data role :type role: QtCore.Qt.ItemDataRole :returns: data for the task :rtype:...
Return the data for task :param tfi: the :class:`jukeboxcore.filesys.TaskFileInfo` holds the data :type tfi: :class:`jukeboxcore.filesys.TaskFileInfo` :param role: item data role :type role: QtCore.Qt.ItemDataRole :returns: data for the task :rtype: depending on role :raises: None
def get_variable_set(self, variable_set, data): """Filters the given variable set based on request parameters""" if data.get('dynamic_layers'): variable_set = [] # TODO elif data.get('layers'): op, layer_ids = data['layers'].split(':', 1) op = op.lower() ...
Filters the given variable set based on request parameters
def _construct_replset(self, basedir, portstart, name, num_nodes, arbiter, extra=''): """ Construct command line strings for a replicaset. Handles single set or sharded cluster. """ self.config_docs[name] = {'_id': name, 'members': []} # Const...
Construct command line strings for a replicaset. Handles single set or sharded cluster.
def macro(name): """Replaces :func:`~flask_admin.model.template.macro`, adding support for using macros imported from another file. For example: .. code:: html+jinja {# templates/admin/column_formatters.html #} {% macro email(model, column) %} {% set address = model[column] %} ...
Replaces :func:`~flask_admin.model.template.macro`, adding support for using macros imported from another file. For example: .. code:: html+jinja {# templates/admin/column_formatters.html #} {% macro email(model, column) %} {% set address = model[column] %} <a href="mailto...
def deep_update_dict(origin_dict, override_dict): """ update origin dict with override dict recursively e.g. origin_dict = {'a': 1, 'b': {'c': 2, 'd': 4}} override_dict = {'b': {'c': 3}} return: {'a': 1, 'b': {'c': 3, 'd': 4}} """ if not override_dict: return origin_dict for ke...
update origin dict with override dict recursively e.g. origin_dict = {'a': 1, 'b': {'c': 2, 'd': 4}} override_dict = {'b': {'c': 3}} return: {'a': 1, 'b': {'c': 3, 'd': 4}}
def show(movie): """ Show the movie metadata. """ for key, value in sorted(movie.iteritems(), cmp=metadata_sorter, key=lambda x: x[0]): if isinstance(value, list): if not value: continue other = value[1:] value = value[0] else: ...
Show the movie metadata.
def replace(self, src: str) -> str: """ Extends LaTeX syntax via regex preprocess :param src: str LaTeX string :return: str New LaTeX string """ if not self.readied: self.ready() # Brackets + simple pre replacements: sr...
Extends LaTeX syntax via regex preprocess :param src: str LaTeX string :return: str New LaTeX string
def to_internal_value(self, data): """ Deserialize data from translations fields. For each received language, delegate validation logic to the translation model serializer. """ if data is None: return if not isinstance(data, dict): self.f...
Deserialize data from translations fields. For each received language, delegate validation logic to the translation model serializer.
def run(self, args): """**down** [*count*] Move the current frame down in the stack trace (to a newer frame). 0 is the most recent frame. If no count is given, move down 1. See also: --------- `up` and `frame`.""" Mframe.adjust_relative(self.proc, self.name, args, self.signum) return False
**down** [*count*] Move the current frame down in the stack trace (to a newer frame). 0 is the most recent frame. If no count is given, move down 1. See also: --------- `up` and `frame`.
def get_repo(path=None, alias=None, create=False): """ Returns ``Repository`` object of type linked with given ``alias`` at the specified ``path``. If ``alias`` is not given it will try to guess it using get_scm method """ if create: if not (path or alias): raise TypeError("I...
Returns ``Repository`` object of type linked with given ``alias`` at the specified ``path``. If ``alias`` is not given it will try to guess it using get_scm method
def sendcontrol(self, char): '''Helper method that wraps send() with mnemonic access for sending control character to the child (such as Ctrl-C or Ctrl-D). For example, to send Ctrl-G (ASCII 7, bell, '\a'):: child.sendcontrol('g') See also, sendintr() and sendeof(). ...
Helper method that wraps send() with mnemonic access for sending control character to the child (such as Ctrl-C or Ctrl-D). For example, to send Ctrl-G (ASCII 7, bell, '\a'):: child.sendcontrol('g') See also, sendintr() and sendeof().
def crab_factory(**kwargs): ''' Factory that generates a CRAB client. A few parameters will be handled by the factory, other parameters will be passed on to the client. :param wsdl: `Optional.` Allows overriding the default CRAB wsdl url. :param proxy: `Optional.` A dictionary of proxy informa...
Factory that generates a CRAB client. A few parameters will be handled by the factory, other parameters will be passed on to the client. :param wsdl: `Optional.` Allows overriding the default CRAB wsdl url. :param proxy: `Optional.` A dictionary of proxy information that is passed to the under...
async def prepare_decrypter(client, cdn_client, cdn_redirect): """ Prepares a new CDN decrypter. :param client: a TelegramClient connected to the main servers. :param cdn_client: a new client connected to the CDN. :param cdn_redirect: the redirect file object that caused this ca...
Prepares a new CDN decrypter. :param client: a TelegramClient connected to the main servers. :param cdn_client: a new client connected to the CDN. :param cdn_redirect: the redirect file object that caused this call. :return: (CdnDecrypter, first chunk file data)
def executable_path(conn, executable): """ Remote validator that accepts a connection object to ensure that a certain executable is available returning its full path if so. Otherwise an exception with thorough details will be raised, informing the user that the executable was not found. """ ...
Remote validator that accepts a connection object to ensure that a certain executable is available returning its full path if so. Otherwise an exception with thorough details will be raised, informing the user that the executable was not found.
def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict ""...
Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict
def did_you_mean(unknown_command, entry_points): """ Return the command with the name most similar to what the user typed. This is used to suggest a correct command when the user types an illegal command. """ from difflib import SequenceMatcher similarity = lambda x: SequenceMatcher(None,...
Return the command with the name most similar to what the user typed. This is used to suggest a correct command when the user types an illegal command.
def init_db(self, db_path): '''initialize the database, with the default database path or custom of the format sqlite:////scif/data/expfactory.db The custom path can be set with the environment variable SREGISTRY_DATABASE when a user creates the client, we must initialize this db the database s...
initialize the database, with the default database path or custom of the format sqlite:////scif/data/expfactory.db The custom path can be set with the environment variable SREGISTRY_DATABASE when a user creates the client, we must initialize this db the database should use the .singularity cache fo...
def do_i_raise_dependency(self, status, inherit_parents, hosts, services, timeperiods): # pylint: disable=too-many-locals """Check if this object or one of its dependency state (chk dependencies) match the status :param status: state list where dependency matters (notification failure criteria)...
Check if this object or one of its dependency state (chk dependencies) match the status :param status: state list where dependency matters (notification failure criteria) :type status: list :param inherit_parents: recurse over parents :type inherit_parents: bool :param hosts: ho...
def load_header_chain( cls, chain_path ): """ Load the header chain from disk. Each chain element will be a dictionary with: * """ header_parser = BlockHeaderSerializer() chain = [] height = 0 with open(chain_path, "rb") as f: h = SP...
Load the header chain from disk. Each chain element will be a dictionary with: *
def from_string(url, default_protocol='telnet'): """ Parses the given URL and returns an URL object. There are some differences to Python's built-in URL parser: - It is less strict, many more inputs are accepted. This is necessary to allow for passing a simple hostname as a UR...
Parses the given URL and returns an URL object. There are some differences to Python's built-in URL parser: - It is less strict, many more inputs are accepted. This is necessary to allow for passing a simple hostname as a URL. - You may specify a default protocol that is used when the...
def normalize_path_out(self, path): """Normalizes path sent to client :param path: path to normalize :return: normalized path """ if path.startswith(self._CWD): normalized_path = path[len(self._CWD):] else: normalized_path = path # For remo...
Normalizes path sent to client :param path: path to normalize :return: normalized path
def _reset(self): """ reset: None -> None Resets the remote XBee device to a standard configuration """ # Analog pin 0 self.hw.remote_at( dest_addr=self.remote_addr, command='D0', parameter='\x02') # Disengage remote LED, buzz...
reset: None -> None Resets the remote XBee device to a standard configuration
def write(self, frame):
    """SLIP-encode *frame* and send it out over the transport.

    :param frame: the frame to transmit; must be a ``FrameBase`` instance.
    :raises PyVLXException: if *frame* is not a ``FrameBase``.
    """
    if not isinstance(frame, FrameBase):
        # Only FrameBase instances know how to serialize themselves via bytes().
        raise PyVLXException("Frame not of type FrameBase", frame_type=type(frame))
    PYVLXLOG.debug("SEND: %s", frame)
    payload = slip_pack(bytes(frame))
    self.transport.write(payload)
Write frame to Bus.
def p_line_label_asm(p):
    """ line : LABEL asms NEWLINE
    """
    # NOTE: the docstring above is a PLY (ply.yacc) grammar production, not
    # documentation -- the parser generator reads it; do not reword it.
    # The line reduces to the assembled instructions parsed after the label.
    p[0] = p[2]
    # Register the label at the current assembly origin; NAMESPACE prefixes
    # the label name, MEMORY.org is its value, p.lineno(1) the source line.
    __DEBUG__("Declaring '%s%s' (value %04Xh) in %i" % (NAMESPACE, p[1], MEMORY.org, p.lineno(1)))
    MEMORY.declare_label(p[1], p.lineno(1))
line : LABEL asms NEWLINE
def generate_security_data(self): """Generate a dict of security data for "initial" data.""" timestamp = int(time.time()) security_dict = { 'content_type': str(self.target_object._meta), 'object_pk': str(self.target_object._get_pk_val()), 'timestamp': str(time...
Generate a dict of security data for "initial" data.
def Vmg(self): r'''Gas-phase molar volume of the mixture at its current temperature, pressure, and composition in units of [m^3/mol]. For calculation of this property at other temperatures or pressures or compositions, or specifying manually the method used to calculate it, and m...
r'''Gas-phase molar volume of the mixture at its current temperature, pressure, and composition in units of [m^3/mol]. For calculation of this property at other temperatures or pressures or compositions, or specifying manually the method used to calculate it, and more - see the object or...
def bleu_score(logits, labels): """Approximate BLEU score computation between labels and predictions. An approximate BLEU scoring method since we do not glue word pieces or decode the ids and tokenize the output. By default, we use ngram order of 4 and use brevity penalty. Also, this does not have beam search....
Approximate BLEU score computation between labels and predictions. An approximate BLEU scoring method since we do not glue word pieces or decode the ids and tokenize the output. By default, we use ngram order of 4 and use brevity penalty. Also, this does not have beam search. Args: logits: Tensor of size ...
def plot_dives_pitch(depths, dive_mask, des, asc, pitch, pitch_lf): '''Plot dives with phase and associated pitch angle with HF signal Args ---- depths: ndarray Depth values at each sensor sampling dive_mask: ndarray Boolean mask slicing dives from the tag data des: ndarray ...
Plot dives with phase and associated pitch angle with HF signal Args ---- depths: ndarray Depth values at each sensor sampling dive_mask: ndarray Boolean mask slicing dives from the tag data des: ndarray boolean mask for slicing descent phases of dives from tag data asc: ...
def optional_else(self, node, last): """ Create op_pos for optional else """ if node.orelse: min_first_max_last(node, node.orelse[-1]) if 'else' in self.operators: position = (node.orelse[0].first_line, node.orelse[0].first_col) _, efirst = self.op...
Create op_pos for optional else
def predict(inputs_list, problem, request_fn): """Encodes inputs, makes request to deployed TF model, and decodes outputs.""" assert isinstance(inputs_list, list) fname = "inputs" if problem.has_inputs else "targets" input_encoder = problem.feature_info[fname].encoder input_ids_list = [ _encode(inputs, ...
Encodes inputs, makes request to deployed TF model, and decodes outputs.
def many_init(cls, *args, **kwargs): """ This method handles creating a parent `ManyRelatedField` instance when the `many=True` keyword argument is passed. Typically you won't need to override this method. Note that we're over-cautious in passing most arguments to both parent ...
This method handles creating a parent `ManyRelatedField` instance when the `many=True` keyword argument is passed. Typically you won't need to override this method. Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the gen...
def _create_pure_shape(self, primitive_type, options, sizes, mass, precision): """ Create Pure Shape """ lua_code = "simCreatePureShape({}, {}, {{{}, {}, {}}}, {}, {{{}, {}}})".format( primitive_type, options, sizes[0], sizes[1], sizes[2], mass, precision[0], precision[1]) self._inje...
Create Pure Shape
def batch(batch_size, items): "Batch items into groups of batch_size" items = list(items) if batch_size is None: return [items] MISSING = object() padded_items = items + [MISSING] * (batch_size - 1) groups = zip(*[padded_items[i::batch_size] for i in range(batch_size)]) return [[item...
Batch items into groups of batch_size
def get_icohp_dict_by_bondlengths(self, minbondlength=0.0, maxbondlength=8.0): """ get a dict of IcohpValues corresponding to certaind bond lengths Args: minbondlength: defines the minimum of the bond lengths of the bonds maxbondlength: defines the maximum of the bond len...
get a dict of IcohpValues corresponding to certain bond lengths Args: minbondlength: defines the minimum of the bond lengths of the bonds maxbondlength: defines the maximum of the bond lengths of the bonds Returns: dict of IcohpValues, the keys correspond to the val...
def wite_to_json(self, dir_path="", file_name=""): """将性能数据写入文件.""" # 提取数据 data = { "plot_data": self.record_thread.profile_data, "method_exec_info": self.method_exec_info, "search_file": self.search_file, "source_file": self.source_file} #...
将性能数据写入文件.
def modify_document(self, doc): ''' Execute the configured ``main.py`` or ``main.ipynb`` to modify the document. This method will also search the app directory for any theme or template files, and automatically configure the document with them if they are found. ''' ...
Execute the configured ``main.py`` or ``main.ipynb`` to modify the document. This method will also search the app directory for any theme or template files, and automatically configure the document with them if they are found.
def getboolean_optional(self, section, option, default=False): """ Get an option boolean value for a given section If the section or the option are not found, the default value is returned :param section: config section :param option: config option :param default: default value ...
Get an option boolean value for a given section If the section or the option are not found, the default value is returned :param section: config section :param option: config option :param default: default value :returns: boolean config value
def generate_fault_source_model(self): ''' Creates a resulting `openquake.hmtk` fault source set. :returns: source_model - list of instances of either the :class: `openquake.hmtk.sources.simple_fault_source.mtkSimpleFaultSource` or :class: `openqu...
Creates a resulting `openquake.hmtk` fault source set. :returns: source_model - list of instances of either the :class: `openquake.hmtk.sources.simple_fault_source.mtkSimpleFaultSource` or :class: `openquake.hmtk.sources.complex_fault_source.mtkComplexFaultSource...
def to_str(data): """Takes an input str or bytes object and returns an equivalent str object. :param data: Input data :type data: str or bytes :returns: Data normalized to str :rtype: str """ if isinstance(data, bytes): return codecs.decode(data, aws_encryption_sdk.internal.defaults...
Takes an input str or bytes object and returns an equivalent str object. :param data: Input data :type data: str or bytes :returns: Data normalized to str :rtype: str
def _deserialize(cls, key, value, fields):
    """Turn a raw incoming *value* for *key* into its Python object form.

    Looks up the field converter registered for *key* in *fields* and
    delegates the actual conversion to it.
    """
    return cls._get_converter_for_field(key, None, fields).deserialize(value)
Marshal incoming data into Python objects.
def create(self, comment, mentions=()): """ create comment :param comment: :param mentions: list of pair of code and type("USER", "GROUP", and so on) :return: """ data = { "app": self.app_id, "record": self.record_id, "comment"...
create comment :param comment: :param mentions: list of pair of code and type("USER", "GROUP", and so on) :return:
def get_vulnerability_chains( current_node, sink, def_use, chain=[] ): """Traverses the def-use graph to find all paths from source to sink that cause a vulnerability. Args: current_node() sink() def_use(dict): chain(list(Node)): A path of nodes between source an...
Traverses the def-use graph to find all paths from source to sink that cause a vulnerability. Args: current_node() sink() def_use(dict): chain(list(Node)): A path of nodes between source and sink.
def get_edge_values(self, feature='idx'): """ Returns edge values in the order they are plotted (see .get_edges()) """ elist = [] for cidx in self._coords.edges[:, 1]: node = self.treenode.search_nodes(idx=cidx)[0] elist.append( (node.__get...
Returns edge values in the order they are plotted (see .get_edges())
def _log_A_0(params, freq, recency, age): """log_A_0.""" r, alpha, s, beta = params if alpha < beta: min_of_alpha_beta, max_of_alpha_beta, t = (alpha, beta, r + freq) else: min_of_alpha_beta, max_of_alpha_beta, t = (beta, alpha, s + 1) abs_alpha_beta = ma...
log_A_0.
def tangle(*args, **kwargs): """ Shortcut to create a new, custom Tangle model. Use instead of directly subclassing `Tangle`. A new, custom Widget class is created, with each of `kwargs` as a traitlet. Returns an instance of the new class with default values. `kwargs` options - primitive ...
Shortcut to create a new, custom Tangle model. Use instead of directly subclassing `Tangle`. A new, custom Widget class is created, with each of `kwargs` as a traitlet. Returns an instance of the new class with default values. `kwargs` options - primitive types (int, bool, float) will be created ...