code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def insert(self, i, tag1, tag2, cmd="prevtag", x=None, y=None): """ Inserts a new rule that updates words with tag1 to tag2, given constraints x and y, e.g., Context.append("TO < NN", "VB") """ if " < " in tag1 and not x and not y: tag1, x = tag1.split(" < "); cmd="prevta...
Inserts a new rule that updates words with tag1 to tag2, given constraints x and y, e.g., Context.append("TO < NN", "VB")
def loadPage(self, number=0): """loadPage(self, number=0) -> Page""" if self.isClosed or self.isEncrypted: raise ValueError("operation illegal for closed / encrypted doc") val = _fitz.Document_loadPage(self, number) if val: val.thisown = True val.par...
loadPage(self, number=0) -> Page
def start(self, hash, name=None, service='facebook'): """ Start a recording for the provided hash :param hash: The hash to start recording with :type hash: str :param name: The name of the recording :type name: str :param service: The service for this...
Start a recording for the provided hash :param hash: The hash to start recording with :type hash: str :param name: The name of the recording :type name: str :param service: The service for this API call (facebook, etc) :type service: str ...
def create_double(self, value: float) -> Double: """ Creates a new :class:`ConstantDouble`, adding it to the pool and returning it. :param value: The value of the new Double. """ self.append((6, value)) self.append(None) return self.get(self.raw_count - 2...
Creates a new :class:`ConstantDouble`, adding it to the pool and returning it. :param value: The value of the new Double.
def loads(self, param): ''' Checks the return parameters generating new proxy instances to avoid query concurrences from shared proxies and creating proxies for actors from another host. ''' if isinstance(param, ProxyRef): try: return self.look...
Checks the return parameters generating new proxy instances to avoid query concurrences from shared proxies and creating proxies for actors from another host.
def concat_variant_files(orig_files, out_file, regions, ref_file, config): """Concatenate multiple variant files from regions into a single output file. Uses GATK4's GatherVcfs, falling back to bcftools concat --naive if it fails. These both only combine samples and avoid parsing, allowing scaling to large...
Concatenate multiple variant files from regions into a single output file. Uses GATK4's GatherVcfs, falling back to bcftools concat --naive if it fails. These both only combine samples and avoid parsing, allowing scaling to large file sizes.
def math_func(f): """ Statics the methods. wut. """ @wraps(f) def wrapper(*args, **kwargs): if len(args) > 0: return_type = type(args[0]) if kwargs.has_key('return_type'): return_type = kwargs['return_type'] kwargs.pop('return_type') re...
Statics the methods. wut.
def setText(self, label, default='', description='Set Text', format='text'): """ Set text in a notebook pipeline (via interaction or with nbconvert) """ obj = self.load(label) if obj == None: obj=default self.save(obj, label) # initialize with default textw = T...
Set text in a notebook pipeline (via interaction or with nbconvert)
def dump(self, fields=None, exclude=None): """ Dump current object to dict, but the value is string for manytomany fields will not automatically be dumpped, only when they are given in fields parameter """ exclude = exclude or [] d = {} if fields and self....
Dump current object to dict, but the value is string for manytomany fields will not automatically be dumpped, only when they are given in fields parameter
def NameImport(package, as_name=None, prefix=None): """ Accepts a package (Name node), name to import it as (string), and optional prefix and returns a node: import <package> [as <as_name>] """ if prefix is None: prefix = u"" children = [Name(u"import", prefix=prefix), package] i...
Accepts a package (Name node), name to import it as (string), and optional prefix and returns a node: import <package> [as <as_name>]
def convert2hdf5(ClassIn, platform_name, bandnames, scale=1e-06): """Retrieve original RSR data and convert to internal hdf5 format. *scale* is the number which has to be multiplied to the wavelength data in order to get it in the SI unit meter """ import h5py instr = ClassIn(bandnames[0], pl...
Retrieve original RSR data and convert to internal hdf5 format. *scale* is the number which has to be multiplied to the wavelength data in order to get it in the SI unit meter
def eth_getCode(self, address, block=BLOCK_TAG_LATEST): """https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode :param address: Address of contract :type address: str :param block: Block tag or number (optional) :type block: int or BLOCK_TAGS :return: code ...
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode :param address: Address of contract :type address: str :param block: Block tag or number (optional) :type block: int or BLOCK_TAGS :return: code :rtype: str
def save_fig_with_metadata(fig, filename, fig_kwds=None, **kwds): """ Save plot to file with metadata included. Kewords translate to metadata that is stored directly in the plot file. Limited format types available. Parameters ---------- fig: matplotlib figure The matplotlib figure to save ...
Save plot to file with metadata included. Kewords translate to metadata that is stored directly in the plot file. Limited format types available. Parameters ---------- fig: matplotlib figure The matplotlib figure to save to the file filename: str Name of file to store the plot.
def roc(self): """ROC plot """ return plot.roc(self.y_true, self.y_score, ax=_gen_ax())
ROC plot
def parse_package_for_version(name): """ Searches for a variable named __version__ in name's __init__.py file and returns the value. This function parses the source text. It does not load the module. """ from utool import util_regex init_fpath = join(name, '__init__.py') version_errmsg ...
Searches for a variable named __version__ in name's __init__.py file and returns the value. This function parses the source text. It does not load the module.
def dump_json(token_dict, dump_path): """write json data to file """ if sys.version > '3': with open(dump_path, 'w', encoding='utf-8') as output_file: json.dump(token_dict, output_file, indent=4) else: with open(dump_path, 'w') as output_file: json.dump(token_dict...
write json data to file
def execute_download_request(request): """ Executes download request. :param request: DownloadRequest to be executed :type request: DownloadRequest :return: downloaded data or None :rtype: numpy array, other possible data type or None :raises: DownloadFailedException """ if request.save...
Executes download request. :param request: DownloadRequest to be executed :type request: DownloadRequest :return: downloaded data or None :rtype: numpy array, other possible data type or None :raises: DownloadFailedException
def submit_job(self, bundle, job_config=None): """Submit a Streams Application Bundle (sab file) to this Streaming Analytics service. Args: bundle(str): path to a Streams application bundle (sab file) containing the application to be submitted job...
Submit a Streams Application Bundle (sab file) to this Streaming Analytics service. Args: bundle(str): path to a Streams application bundle (sab file) containing the application to be submitted job_config(JobConfig): a job configuration overlay ...
def raw_clean(self, datas): """ Apply a cleaning on raw datas. """ datas = strip_tags(datas) # Remove HTML datas = STOP_WORDS.rebase(datas, '') # Remove STOP WORDS datas = PUNCTUATION.sub('', datas) # Remove punctuation datas = datas.lower() ...
Apply a cleaning on raw datas.
def get_view(self): """ Get the page to display. If a view has already been created and is cached, use that otherwise initialize the view and proxy. If defer loading is used, wrap the view in a FrameLayout and defer add view until later. """ d = self.declaratio...
Get the page to display. If a view has already been created and is cached, use that otherwise initialize the view and proxy. If defer loading is used, wrap the view in a FrameLayout and defer add view until later.
def purge_archives(self): """ Delete older archived items. Use the class attribute NUM_KEEP_ARCHIVED to control how many items are kept. """ klass = self.get_version_class() qs = klass.normal.filter(object_id=self.object_id, stat...
Delete older archived items. Use the class attribute NUM_KEEP_ARCHIVED to control how many items are kept.
def migratable_vc_domains(self): """ Gets the VC Migration Manager API client. Returns: MigratableVcDomains: """ if not self.__migratable_vc_domains: self.__migratable_vc_domains = MigratableVcDomains(self.__connection) return self.__migratable_vc...
Gets the VC Migration Manager API client. Returns: MigratableVcDomains:
def deserialize(self, obj): """Deserialize an object from the front-end.""" if obj['immutable']: return obj['value'] else: guid = obj['value'] if not guid in object_registry: instance = JSObject(self, guid) object_registry[guid]...
Deserialize an object from the front-end.
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile): """ NWSRFS Read from File Method """ # Set file extension property self.fileExtension = extension # Open file and parse with open(path, 'r') as nw...
NWSRFS Read from File Method
def changeLogType(self): '''Populate log program list to correspond with log type selection.''' logType = self.selectedType() programs = self.logList.get(logType)[0] default = self.logList.get(logType)[1] if logType in self.logList: self.programName.clear() ...
Populate log program list to correspond with log type selection.
def get_monolayer(self): """ Returns the primitive unit surface area density of the adsorbate. """ unit_a = self.get_unit_primitive_area Nsurfs = self.Nsurfs_ads_in_slab Nads = self.Nads_in_slab return Nads / (unit_a * Nsurfs)
Returns the primitive unit surface area density of the adsorbate.
def use(self, *middleware: MiddlewareType) -> None: """ Register Middleware :param middleware: The Middleware Function """ for m in middleware: if is_middleware(m): self.middleware.append(m)
Register Middleware :param middleware: The Middleware Function
def to_simplex(y): r""" Interprets the last index of ``y`` as stick breaking fractions in logit space and returns a non-negative array of the same shape where the last dimension always sums to unity. A unit simplex is a list of non-negative numbers :math:`(x_1,...,x_K)` that sum to one, :...
r""" Interprets the last index of ``y`` as stick breaking fractions in logit space and returns a non-negative array of the same shape where the last dimension always sums to unity. A unit simplex is a list of non-negative numbers :math:`(x_1,...,x_K)` that sum to one, :math:`\sum_{k=1}^K x_k=...
def survival_rate(work_db): """Calcuate the survival rate for the results in a WorkDB. """ kills = sum(r.is_killed for _, r in work_db.results) num_results = work_db.num_results if not num_results: return 0 return (1 - kills / num_results) * 100
Calcuate the survival rate for the results in a WorkDB.
def _temporal_distance_cdf(self): """ Temporal distance cumulative density function. Returns ------- x_values: numpy.array values for the x-axis cdf: numpy.array cdf values """ distance_split_points = set() for block in sel...
Temporal distance cumulative density function. Returns ------- x_values: numpy.array values for the x-axis cdf: numpy.array cdf values
def d3logpdf_dlink3(self, link_f, y, Y_metadata=None): """ Third order derivative log-likelihood function at y given link(f) w.r.t link(f) .. math:: \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = -\\beta^{3}\\frac{d^{2}\\Psi(\\alpha_{i})}{d\\alpha_{i}}\\\\ ...
Third order derivative log-likelihood function at y given link(f) w.r.t link(f) .. math:: \\frac{d^{3} \\ln p(y_{i}|\lambda(f_{i}))}{d^{3}\\lambda(f)} = -\\beta^{3}\\frac{d^{2}\\Psi(\\alpha_{i})}{d\\alpha_{i}}\\\\ \\alpha_{i} = \\beta y_{i} :param link_f: latent variables link(...
async def start(self): """Start the supervisor server.""" await self.server.start() self.port = self.server.port
Start the supervisor server.
def reject_source(ident, comment): '''Reject a source for automatic harvesting''' source = get_source(ident) source.validation.on = datetime.now() source.validation.comment = comment source.validation.state = VALIDATION_REFUSED if current_user.is_authenticated: source.validation.by = cur...
Reject a source for automatic harvesting
def Histograms(self, run, tag): """Retrieve the histogram events associated with a run and tag. Args: run: A string name of the run for which values are retrieved. tag: A string name of the tag for which values are retrieved. Raises: KeyError: If the run is not found, or the tag is not a...
Retrieve the histogram events associated with a run and tag. Args: run: A string name of the run for which values are retrieved. tag: A string name of the tag for which values are retrieved. Raises: KeyError: If the run is not found, or the tag is not available for the given run. ...
def resize_image_with_crop_or_pad(image, target_height, target_width, dynamic_shape=False): """Crops and/or pads an image to a target width and height. Resizes an image to a target width and height by either centrally cropping the image or padding it evenly with zeros. If `wi...
Crops and/or pads an image to a target width and height. Resizes an image to a target width and height by either centrally cropping the image or padding it evenly with zeros. If `width` or `height` is greater than the specified `target_width` or `target_height` respectively, this op centrally crops along that...
def _submit(primitive, port_index, tuple_): """Internal method to submit a tuple""" args = (_get_opc(primitive), port_index, tuple_) _ec._submit(args)
Internal method to submit a tuple
def notify_about_new_variables(callback): """Calls `callback(var)` for all newly created variables. Callback should not modify the variable passed in. Use cases that require variables to be modified should use `variable_creator_scope` directly and sit within the variable creator stack. >>> variables = [] ...
Calls `callback(var)` for all newly created variables. Callback should not modify the variable passed in. Use cases that require variables to be modified should use `variable_creator_scope` directly and sit within the variable creator stack. >>> variables = [] >>> with notify_about_variables(variables.appen...
def scalars_impl(self, run, tag_regex_string): """Given a tag regex and single run, return ScalarEvents. Args: run: A run string. tag_regex_string: A regular expression that captures portions of tags. Raises: ValueError: if the scalars plugin is not registered. Returns: A dict...
Given a tag regex and single run, return ScalarEvents. Args: run: A run string. tag_regex_string: A regular expression that captures portions of tags. Raises: ValueError: if the scalars plugin is not registered. Returns: A dictionary that is the JSON-able response.
def _build_full_partition( optional_parts, sequence_var_partition: Sequence[int], subjects: Sequence[Expression], operation: Operation ) -> List[Sequence[Expression]]: """Distribute subject operands among pattern operands. Given a partitoning for the variable part of the operands (i.e. a list of how ma...
Distribute subject operands among pattern operands. Given a partitoning for the variable part of the operands (i.e. a list of how many extra operands each sequence variable gets assigned).
def sig_cmp(sig1, sig2): """ Compares two normalized type signatures for validation purposes. """ types1 = sig1.required types2 = sig2.required if len(types1) != len(types2): return False dup_pos = [] dup_kw = {} for t1, t2 in zip(types1, types2): match = type_cmp(t1,...
Compares two normalized type signatures for validation purposes.
def get_node_by_id(self, node_id): """ Gets a node with requested ID. Returns a tuple, where first value is node ID, second - a dictionary of all node attributes. :param node_id: string with ID of node. """ tmp_nodes = self.diagram_graph.nodes(data=True) for node...
Gets a node with requested ID. Returns a tuple, where first value is node ID, second - a dictionary of all node attributes. :param node_id: string with ID of node.
def _process_if(self, node): """Process an if node.""" creg_name = node.children[0].name creg = self.dag.cregs[creg_name] cval = node.children[1].value self.condition = (creg, cval) self._process_node(node.children[2]) self.condition = None
Process an if node.
def postSolve(self): ''' This method adds consumption at m=0 to the list of stable arm points, then constructs the consumption function as a cubic interpolation over those points. Should be run after the backshooting routine is complete. Parameters ---------- no...
This method adds consumption at m=0 to the list of stable arm points, then constructs the consumption function as a cubic interpolation over those points. Should be run after the backshooting routine is complete. Parameters ---------- none Returns ------- ...
def fit(self, X): """Fit the PyNNDescent transformer to build KNN graphs with neighbors given by the dataset X. Parameters ---------- X : array-like, shape (n_samples, n_features) Sample data Returns ------- transformer : PyNNDescentTransform...
Fit the PyNNDescent transformer to build KNN graphs with neighbors given by the dataset X. Parameters ---------- X : array-like, shape (n_samples, n_features) Sample data Returns ------- transformer : PyNNDescentTransformer The trained tr...
def prune(self, cutoff: int = 2): """ Prunes the CAG by removing redundant paths. If there are multiple (directed) paths between two nodes, this function removes all but the longest paths. Subsequently, it restricts the graph to the largest connected component. Args: ...
Prunes the CAG by removing redundant paths. If there are multiple (directed) paths between two nodes, this function removes all but the longest paths. Subsequently, it restricts the graph to the largest connected component. Args: cutoff: The maximum path length to consider f...
def convert_notebook(self, name): """Converts a notebook into a python file.""" #subprocess.call(["jupyter","nbconvert","--to","python", # self.get_path("%s.ipynb"%name)]) exporter = nbconvert.exporters.python.PythonExporter() relative_path = self.convert_path(nam...
Converts a notebook into a python file.
def is_repository(self, path): """ Check if there is a Repository in path. :Parameters: #. path (string): The real path of the directory where to check if there is a repository. :Returns: #. result (boolean): Whether it's a repository or not. ...
Check if there is a Repository in path. :Parameters: #. path (string): The real path of the directory where to check if there is a repository. :Returns: #. result (boolean): Whether it's a repository or not.
def clear_weights(self): ''' clear weights of the graph ''' self.weighted = False for layer in self.layer_list: layer.weights = None
clear weights of the graph
def get_processed_data(self, *args, **kwargs): """ Get and process forecast data. Parameters ---------- *args: positional arguments Passed to get_data **kwargs: keyword arguments Passed to get_data and process_data Returns -------...
Get and process forecast data. Parameters ---------- *args: positional arguments Passed to get_data **kwargs: keyword arguments Passed to get_data and process_data Returns ------- data: DataFrame Processed forecast data
def process(self, batch, device=None): """ Process a list of examples to create a torch.Tensor. Pad, numericalize, and postprocess a batch and create a tensor. Args: batch (list(object)): A list of object from a batch of examples. Returns: torch.autograd.Variabl...
Process a list of examples to create a torch.Tensor. Pad, numericalize, and postprocess a batch and create a tensor. Args: batch (list(object)): A list of object from a batch of examples. Returns: torch.autograd.Variable: Processed object given the input and...
def validate(self, auth_rest): """Validate user credentials whether format is right for Sha1 :param auth_rest: User credentials' part without auth_type :return: Dict with a hash and a salt part of user credentials :raises ValueError: If credentials' part doesn't contain delimiter ...
Validate user credentials whether format is right for Sha1 :param auth_rest: User credentials' part without auth_type :return: Dict with a hash and a salt part of user credentials :raises ValueError: If credentials' part doesn't contain delimiter between a salt and a...
def add_color_stop_rgb(self, offset, red, green, blue): """Same as :meth:`add_color_stop_rgba` with ``alpha=1``. Kept for compatibility with pycairo. """ cairo.cairo_pattern_add_color_stop_rgb( self._pointer, offset, red, green, blue) self._check_status()
Same as :meth:`add_color_stop_rgba` with ``alpha=1``. Kept for compatibility with pycairo.
def is_initialised( self ): """ Check whether the simulation has been initialised. Args: None Returns: None """ if not self.lattice: raise AttributeError('Running a simulation needs the lattice to be initialised') if not self....
Check whether the simulation has been initialised. Args: None Returns: None
def save_figures(image_path, fig_count, gallery_conf): """Save all open matplotlib figures of the example code-block Parameters ---------- image_path : str Path where plots are saved (format string which accepts figure number) fig_count : int Previous figure number count. Figure num...
Save all open matplotlib figures of the example code-block Parameters ---------- image_path : str Path where plots are saved (format string which accepts figure number) fig_count : int Previous figure number count. Figure number add from this number Returns ------- list of ...
def coerce(from_, to, **to_kwargs): """ A preprocessing decorator that coerces inputs of a given type by passing them to a callable. Parameters ---------- from : type or tuple or types Inputs types on which to call ``to``. to : function Coercion function to call on inputs. ...
A preprocessing decorator that coerces inputs of a given type by passing them to a callable. Parameters ---------- from : type or tuple or types Inputs types on which to call ``to``. to : function Coercion function to call on inputs. **to_kwargs Additional keywords to fo...
def verify_quote(self, quote_id, extra): """Verifies that a quote order is valid. :: extras = { 'hardware': {'hostname': 'test', 'domain': 'testing.com'}, 'quantity': 2 } manager = ordering.OrderingManager(env.client) resu...
Verifies that a quote order is valid. :: extras = { 'hardware': {'hostname': 'test', 'domain': 'testing.com'}, 'quantity': 2 } manager = ordering.OrderingManager(env.client) result = manager.verify_quote(12345, extras) :...
def oauthgetm(method, param_dict, socket_timeout=None): try: import oauth2 # lazy import this so oauth2 is not a hard dep except ImportError: raise Exception("You must install the python-oauth2 library to use this method.") """ Call the api! With Oauth! Param_dict is a *regular* *p...
Call the api! With Oauth! Param_dict is a *regular* *python* *dictionary* so if you want to have multi-valued params put them in a list. ** note, if we require 2.6, we can get rid of this timeout munging.
def get_cur_batch(items): """Retrieve name of the batch shared between all items in a group. """ batches = [] for data in items: batch = tz.get_in(["metadata", "batch"], data, []) batches.append(set(batch) if isinstance(batch, (list, tuple)) else set([batch])) combo_batches = reduce(...
Retrieve name of the batch shared between all items in a group.
def on_rabbitmq_close(self, reply_code, reply_text): """Called when RabbitMQ has been connected to. :param int reply_code: The code for the disconnect :param str reply_text: The disconnect reason """ global rabbitmq_connection LOGGER.warning('RabbitMQ has disconnected (...
Called when RabbitMQ has been connected to. :param int reply_code: The code for the disconnect :param str reply_text: The disconnect reason
def run_parallel(pipeline, input_gen, options={}, ncpu=4, chunksize=200): """ Run a pipeline in parallel over a input generator cutting it into small chunks. >>> # if we have a simple component >>> from reliure.pipeline import Composable >>> # that we want to run over a given input: >>> input =...
Run a pipeline in parallel over a input generator cutting it into small chunks. >>> # if we have a simple component >>> from reliure.pipeline import Composable >>> # that we want to run over a given input: >>> input = "abcde" >>> import string >>> pipeline = Composable(lambda letters: (l.up...
def append_md5_if_too_long(component, size): """ Trims the component if it is longer than size and appends the component's md5. Total must be of length size. :param str component: component to work on :param int size: component's size limit :return str: component and app...
Trims the component if it is longer than size and appends the component's md5. Total must be of length size. :param str component: component to work on :param int size: component's size limit :return str: component and appended md5 trimmed to be of length size
def before_insert(mapper, conn, target): """event.listen method for Sqlalchemy to set the seqience_id for this object and create an ObjectNumber value for the id_""" # from identity import ObjectNumber # assert not target.fk_vid or not ObjectNumber.parse(target.fk_vid).revision ...
event.listen method for Sqlalchemy to set the seqience_id for this object and create an ObjectNumber value for the id_
def _check_dedup(data): """Check configuration for de-duplication. Defaults to no de-duplication for RNA-seq and small RNA, the back compatible default. Allow overwriting with explicit `mark_duplicates: true` setting. Also defaults to false for no alignment inputs. """ if dd.get_analysis(da...
Check configuration for de-duplication. Defaults to no de-duplication for RNA-seq and small RNA, the back compatible default. Allow overwriting with explicit `mark_duplicates: true` setting. Also defaults to false for no alignment inputs.
def instance_absent(name, instance_name=None, instance_id=None, release_eip=False, region=None, key=None, keyid=None, profile=None, filters=None): ''' Ensure an EC2 instance does not exist (is stopped and removed). .. versionchanged:: 2016.11.0 name (str...
Ensure an EC2 instance does not exist (is stopped and removed). .. versionchanged:: 2016.11.0 name (string) - The name of the state definition. instance_name (string) - The name of the instance. instance_id (string) - The ID of the instance. release_eip (bool) - R...
def interpolation_points(self, N): """ N Chebyshev points in [-1, 1], boundaries included """ if N == 1: return np.array([0.]) return np.cos(np.arange(N)*np.pi/(N-1))
N Chebyshev points in [-1, 1], boundaries included
def _GetAttributes(self): """Retrieves the attributes. Returns: list[NTFSAttribute]: attributes. """ if self._attributes is None: self._attributes = [] for fsntfs_attribute in self._fsntfs_file_entry.attributes: attribute_class = self._ATTRIBUTE_TYPE_CLASS_MAPPINGS.get( ...
Retrieves the attributes. Returns: list[NTFSAttribute]: attributes.
def sorted_stats(self): """Get the stats sorted by an alias (if present) or key.""" key = self.get_key() return sorted(self.stats, key=lambda stat: tuple(map( lambda part: int(part) if part.isdigit() else part.lower(), re.split(r"(\d+|\D+)", self.has_alias(stat[key]) or s...
Get the stats sorted by an alias (if present) or key.
def _aggr_mean(inList): """ Returns mean of non-None elements of the list """ aggrSum = 0 nonNone = 0 for elem in inList: if elem != SENTINEL_VALUE_FOR_MISSING_DATA: aggrSum += elem nonNone += 1 if nonNone != 0: return aggrSum / nonNone else: return None
Returns mean of non-None elements of the list
def get_go2color_inst(self, hdrgo): """Get a copy of go2color with GO group header colored.""" go2color = self.go2color.copy() go2color[hdrgo] = self.hdrgo_dflt_color return go2color
Get a copy of go2color with GO group header colored.
def parse_header(header): """ Convert a list of the form `['fieldname:fieldtype:fieldsize',...]` into a numpy composite dtype. The parser understands headers generated by :func:`openquake.commonlib.writers.build_header`. Here is an example: >>> parse_header(['PGA:float32', 'PGV', 'avg:float32:2...
Convert a list of the form `['fieldname:fieldtype:fieldsize',...]` into a numpy composite dtype. The parser understands headers generated by :func:`openquake.commonlib.writers.build_header`. Here is an example: >>> parse_header(['PGA:float32', 'PGV', 'avg:float32:2']) (['PGA', 'PGV', 'avg'], dtype(...
def run_from_argv(self, prog, subcommand, global_options, argv): """ Set up any environment changes requested, then run this command. """ self.prog_name = prog parser = self.create_parser(prog, subcommand) options, args = parser.parse_args(argv) self.global_o...
Set up any environment changes requested, then run this command.
def from_chars(chars): """Make Pauli's Term from chars which is written by "X", "Y", "Z" or "I". e.g. "XZIY" => X(0) * Z(1) * Y(3) Args: chars (str): Written in "X", "Y", "Z" or "I". Returns: Term: A `Term` object. Raises: ValueError: When c...
Make Pauli's Term from chars which is written by "X", "Y", "Z" or "I". e.g. "XZIY" => X(0) * Z(1) * Y(3) Args: chars (str): Written in "X", "Y", "Z" or "I". Returns: Term: A `Term` object. Raises: ValueError: When chars conteins the character which ...
def log_cert_info(logger, msg_str, cert_obj): """Dump basic certificate values to the log. Args: logger: Logger Logger to which to write the certificate values. msg_str: str A message to write to the log before the certificate values. cert_obj: cryptography.Certificate ...
Dump basic certificate values to the log. Args: logger: Logger Logger to which to write the certificate values. msg_str: str A message to write to the log before the certificate values. cert_obj: cryptography.Certificate Certificate containing values to log. Returns...
def wait_for_element_to_disappear(self, locator, params=None, timeout=None): """ Waits until the element is not visible (hidden) or no longer attached to the DOM. Raises TimeoutException if element does not become invisible. :param locator: locator tuple or WebElement instance ...
Waits until the element is not visible (hidden) or no longer attached to the DOM. Raises TimeoutException if element does not become invisible. :param locator: locator tuple or WebElement instance :param params: (optional) locator params :param timeout: (optional) time to wait for elem...
def request(self, method, path, query=None, content=None): """ Sends an HTTP request. This constructs a full URL, encodes and decodes HTTP bodies, and handles invalid responses in a pythonic way. @type method: string @param method: HTTP method to use @type path:...
Sends an HTTP request. This constructs a full URL, encodes and decodes HTTP bodies, and handles invalid responses in a pythonic way. @type method: string @param method: HTTP method to use @type path: string @param path: HTTP URL path @type query: list of two-tup...
async def provStacks(self, offs, size): ''' Return stream of (iden, provenance stack) tuples at the given offset. ''' count = 0 for iden, stack in self.cell.provstor.provStacks(offs, size): count += 1 if not count % 1000: await asyncio.slee...
Return stream of (iden, provenance stack) tuples at the given offset.
def process_pkcs7(self, data, name): """ Process PKCS7 signature with certificate in it. :param data: :param name: :return: """ from cryptography.hazmat.backends.openssl.backend import backend from cryptography.hazmat.backends.openssl.x509 import _Certific...
Process PKCS7 signature with certificate in it. :param data: :param name: :return:
def pexpire(self, key, milliseconds): """Emulate pexpire""" return self._expire(self._encode(key), timedelta(milliseconds=milliseconds))
Emulate pexpire
def count_duplicate_starts(bam_file, sample_size=10000000): """ Return a set of x, y points where x is the number of reads sequenced and y is the number of unique start sites identified If sample size < total reads in a file the file will be downsampled. """ count = Counter() with bam.open_s...
Return a set of x, y points where x is the number of reads sequenced and y is the number of unique start sites identified If sample size < total reads in a file the file will be downsampled.
def setParent(self, other): """ Sets the parent for this layer to the inputed layer. :param other | <XNodeLayer> || None :return <bool> changed """ if self._parent == other: return False # remove this layer from its ...
Sets the parent for this layer to the given layer. :param other | <XNodeLayer> || None :return <bool> changed
def register(): """Uses the new style of registration based on GitHub Pelican issue #314.""" signals.initialized.connect(initialized) try: signals.content_object_init.connect(detect_content) signals.all_generators_finalized.connect(detect_images_and_galleries) signals.article_writer_...
Uses the new style of registration based on GitHub Pelican issue #314.
def get_current_temperature(self, refresh=False):
    """Return the current temperature as a float.

    :param refresh: when True, refresh the device state before reading.
    :return: the temperature value, or None when the stored value is
        missing or cannot be converted to a float.
    """
    if refresh:
        self.refresh()
    try:
        reading = self.get_value('temperature')
        return float(reading)
    except (TypeError, ValueError):
        return None
Get current temperature
def neg_loglik(self,beta): """ Creates the negative log likelihood of the model Parameters ---------- beta : np.array Contains untransformed starting values for latent variables Returns ---------- The negative log logliklihood of the model ""...
Creates the negative log likelihood of the model Parameters ---------- beta : np.array Contains untransformed starting values for latent variables Returns ---------- The negative log-likelihood of the model
def display(self): """Displays the network to the screen.""" size = list(range(len(self.layers))) size.reverse() for i in size: layer = self.layers[i] if layer.active: print('%s layer (size %d)' % (layer.name, layer.size)) tlabel, o...
Displays the network to the screen.
def reload(self):
    """Re-read the file from storage and replace the buffer contents.

    The cursor position is preserved where possible, clamped to the
    length of the freshly loaded text.
    """
    text = self._read(self.location)
    self._file_content = text
    cursor = min(self.buffer.cursor_position, len(text))
    self.buffer.document = Document(text, cursor)
Reload file again from storage.
def handleOneNodeMsg(self, wrappedMsg): """ Validate and process one message from a node. :param wrappedMsg: Tuple of message and the name of the node that sent the message """ try: vmsg = self.validateNodeMsg(wrappedMsg) if vmsg: ...
Validate and process one message from a node. :param wrappedMsg: Tuple of message and the name of the node that sent the message
def uint(nstr, schema):
    """!~~uint

    Validate a positive integer: accepts ints/longs, or digit-only
    strings that parse to a positive integer; everything else is False.
    """
    if isinstance(nstr, (int, long)):
        return nstr > 0
    if isinstance(nstr, basestring) and nstr.isdigit():
        return long(nstr) > 0
    return False
!~~uint
def request_frame(self):
    """Build and return the frame that initiates this command.

    A fresh session id is generated and stored on the instance so the
    response can later be matched to this request.
    """
    self.session_id = get_new_session_id()
    frame = FrameCommandSendRequest(
        node_ids=[self.node_id],
        parameter=self.parameter,
        session_id=self.session_id,
    )
    return frame
Construct initiating frame.
def on_subscript(self, node): # ('value', 'slice', 'ctx') """Subscript handling -- one of the tricky parts.""" val = self.run(node.value) nslice = self.run(node.slice) ctx = node.ctx.__class__ if ctx in (ast.Load, ast.Store): if isinstance(node.slice, (ast.Index, a...
Subscript handling -- one of the tricky parts.
def ask_pascal_16(self, next_rva_ptr): """The next RVA is taken to be the one immediately following this one. Such RVA could indicate the natural end of the string and will be checked with the possible length contained in the first word. """ length = self.__get_...
The next RVA is taken to be the one immediately following this one. Such RVA could indicate the natural end of the string and will be checked with the possible length contained in the first word.
def update_warning(self): """ Updates the icon and tip based on the validity of the array content. """ widget = self._button_warning if not self.is_valid(): tip = _('Array dimensions not valid') widget.setIcon(ima.icon('MessageBoxWarning')) ...
Updates the icon and tip based on the validity of the array content.
def remove_namespace(doc, namespace):
    '''Remove namespace in the passed document in place.

    Every tag qualified with *namespace* is rewritten to its bare local
    name, and the stripped namespace URI is recorded on the element's
    ``oxmlns`` attribute.

    :param doc: ElementTree element (or tree) to modify in place.
    :param namespace: namespace URI to strip from tag names.
    '''
    ns = u'{%s}' % namespace
    nsl = len(ns)
    # Element.getiterator() was deprecated and removed in Python 3.9;
    # iter() is the supported spelling and behaves identically here.
    for elem in doc.iter():
        if elem.tag.startswith(ns):
            elem.tag = elem.tag[nsl:]
            # NOTE(review): 'oxmlns' reads like "original xmlns", but
            # confirm it is not a typo for 'xmlns'.
            elem.attrib['oxmlns'] = namespace
Remove namespace in the passed document in place.
def _process_output(output, parse_json=True): """Process output.""" output = output.strip() _LOGGER.debug('Received: %s', output) if not output: return None elif 'decrypt_verify' in output: raise RequestError( 'Please compile coap-client without debug output. See ' ...
Process output.
def batch_delete_jobs( self, parent, filter_, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes a list of ``Job``\ s by filter. Example: >>> from google.cl...
Deletes a list of ``Job``\ s by filter. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.JobServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `filte...
def get_environment_requirements_list(): """ Take the requirements list from the current running environment :return: string """ requirement_list = [] requirements = check_output([sys.executable, '-m', 'pip', 'freeze']) for requirement in requirements.split(): requirement_list.appe...
Take the requirements list from the current running environment :return: string
def create(host, port): """ Prepare server to execute :return: Modules to execute, cmd line function :rtype: list[WrapperServer], callable | None """ wrapper = WrapperServer({ 'server': None }) d = { 'listen_port': port, 'changer': wrapper } if host: ...
Prepare server to execute :return: Modules to execute, cmd line function :rtype: list[WrapperServer], callable | None
def typed_subtopic_data(fc, subid): '''Returns typed subtopic data from an FC.''' # I don't think this code will change after we fix the data race bug. ---AG ty = subtopic_type(subid) data = get_unicode_feature(fc, subid) assert isinstance(data, unicode), \ 'data should be `unicode` but is %...
Returns typed subtopic data from an FC.
def filter(self, *args): """ 为文本 ``(text)`` 消息添加 handler 的简便方法。 使用 ``@filter("xxx")``, ``@filter(re.compile("xxx"))`` 或 ``@filter("xxx", "xxx2")`` 的形式为特定内容添加 handler。 """ def wraps(f): self.add_filter(func=f, rules=list(args)) return f r...
为文本 ``(text)`` 消息添加 handler 的简便方法。 使用 ``@filter("xxx")``, ``@filter(re.compile("xxx"))`` 或 ``@filter("xxx", "xxx2")`` 的形式为特定内容添加 handler。
def __insert_frond_LF(d_w, d_u, dfs_data):
    """Encapsulates the process of inserting a frond uw into the left side frond group."""
    frond = (d_w, d_u)
    # Record the frond on the left side and bump the left-group count.
    dfs_data['LF'].append(frond)
    dfs_data['FG']['l'] += 1
    dfs_data['last_inserted_side'] = 'LF'
Encapsulates the process of inserting a frond uw into the left side frond group.
def upload_file_to(self, addressinfo, timeout): """Uploads the raw firmware file to iLO Uploads the raw firmware file (already set as attribute in FirmwareImageControllerBase constructor) to iLO, whose address information is passed to this method. :param addressinfo: tuple of ho...
Uploads the raw firmware file to iLO Uploads the raw firmware file (already set as attribute in FirmwareImageControllerBase constructor) to iLO, whose address information is passed to this method. :param addressinfo: tuple of hostname and port of the iLO :param timeout: timeout ...