code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def _remove_wire_nets(block): """ Remove all wire nodes from the block. """ wire_src_dict = _ProducerList() wire_removal_set = set() # set of all wirevectors to be removed # one pass to build the map of value producers and # all of the nets and wires to be removed for net in block.logic: ...
Remove all wire nodes from the block.
def blurred_image_1d_from_1d_unblurred_and_blurring_images(unblurred_image_1d, blurring_image_1d, convolver): """For a 1D masked image and 1D blurring image (the regions outside the mask whose light blurs \ into the mask after PSF convolution), use both to compute the blurred image within the mask via PSF convo...
For a 1D masked image and 1D blurring image (the regions outside the mask whose light blurs \ into the mask after PSF convolution), use both to compute the blurred image within the mask via PSF convolution. The convolution uses each image's convolver (*See ccd.convolution*). Parameters ---------- ...
def show_system_info_output_show_system_info_rbridge_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") show_system_info = ET.Element("show_system_info") config = show_system_info output = ET.SubElement(show_system_info, "output") show_sy...
Auto Generated Code
def set_extend(self, extend): """ Sets the mode to be used for drawing outside the area of this pattern. See :ref:`EXTEND` for details on the semantics of each extend strategy. The default extend mode is :obj:`NONE <EXTEND_NONE>` for :class:`SurfacePattern` and :obj:`PAD...
Sets the mode to be used for drawing outside the area of this pattern. See :ref:`EXTEND` for details on the semantics of each extend strategy. The default extend mode is :obj:`NONE <EXTEND_NONE>` for :class:`SurfacePattern` and :obj:`PAD <EXTEND_PAD>` for :class:`Gradient` patterns.
def get_lines_from_file(filename, lineno, context_lines): """ Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context). """ def get_lines(start, end): return [linecache.getline(filename, l).rstrip() for l in range(start, ...
Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context).
def _make_args_checker(self): """ Create a function that checks signature of the source function. """ def _checker(*args, **kws): # Check if too many arguments are provided nargs = len(args) nnonvaargs = min(nargs, self._max_positional_args) ...
Create a function that checks signature of the source function.
def run(itf): """ Run postanalyze functions. """ if not itf: return 1 # access user input options = SplitInput(itf) # check input args error_check(options) # read input files try: molecules, ensemble_lookup = ReadFiles(options) except: return 1 if o...
Run postanalyze functions.
def get_items(self, from_date, url, expand_fields=True): """Retrieve all the items from a given date. :param url: endpoint API url :param from_date: obtain items updated since this date :param expand_fields: if True, it includes the expand fields in the payload """ start...
Retrieve all the items from a given date. :param url: endpoint API url :param from_date: obtain items updated since this date :param expand_fields: if True, it includes the expand fields in the payload
def timeago(tz=None, *args, **kwargs):
    """Return the datetime that was the given amount of time ago.

    Accepts the same arguments as ``timedelta()``; *tz* selects the
    timezone of the reference "now".
    """
    delta = timedelta(*args, **kwargs)
    return totz(datetime.now(), tz) - delta
Return a datetime so much time ago. Takes the same arguments as timedelta().
def build_agg_vec(agg_vec, **source): """ Builds an combined aggregation vector based on various classifications This function build an aggregation vector based on the order in agg_vec. The naming and actual mapping is given in source, either explicitly or by pointing to a folder with the mapping. ...
Builds an combined aggregation vector based on various classifications This function build an aggregation vector based on the order in agg_vec. The naming and actual mapping is given in source, either explicitly or by pointing to a folder with the mapping. >>> build_agg_vec(['EU', 'OECD'], path = 'tes...
def plot_neuron3d(ax, nrn, neurite_type=NeuriteType.all, diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH, color=None, alpha=_ALPHA): ''' Generates a figure of the neuron, that contains a soma and a list of trees. Args: ax(matplotlib axes): on what to plo...
Generates a figure of the neuron, that contains a soma and a list of trees. Args: ax(matplotlib axes): on what to plot nrn(neuron): neuron to be plotted neurite_type(NeuriteType): an optional filter on the neurite type diameter_scale(float): Scale factor multiplied with segment ...
def get_unawarded_user_ids(self, db_read=None): """ Returns unawarded user ids (need to be saved) and the count. """ db_read = db_read or self.db_read already_awarded_ids = self.get_already_awarded_user_ids(db_read=db_read) current_ids = self.get_current_user_ids(db_read...
Returns unawarded user ids (need to be saved) and the count.
def _get_code_w_scope(data, position, obj_end, opts):
    """Decode a BSON code_w_scope element into a bson.code.Code."""
    # The element begins with a 4-byte total length we don't need; skip it,
    # then read the JavaScript string followed by its scope document.
    js_code, pos = _get_string(data, position + 4, obj_end, opts)
    scope_doc, pos = _get_object(data, pos, obj_end, opts)
    return Code(js_code, scope_doc), pos
Decode a BSON code_w_scope to bson.code.Code.
def remove_api_key(self): """ Removes the user's existing API key, if present, and sets the current instance's 'api_key' attribute to the empty string. Returns: `NoneType`: None. """ url = self.record_url + "/remove_api_key" res = requests.patch(url=u...
Removes the user's existing API key, if present, and sets the current instance's 'api_key' attribute to the empty string. Returns: `NoneType`: None.
def subscribe(self, tag, fun, description=None):
    """Register *fun* as the handler for *tag* and subscribe the socket.

    :param tag: topic string, used both as the registry key and as the
        nanomsg subscription prefix
    :param fun: callable invoked for messages on this topic
    :param description: optional human-readable description of the handler
    """
    self.methods[tag] = fun
    self.descriptions[tag] = description
    # Tell the SUB socket to start delivering messages whose topic matches *tag*.
    self.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, tag)
Subscribe to something and register a function
def update_binary_stats(self, label, pred): """ Update various binary classification counts for a single (label, pred) pair. Parameters ---------- label : `NDArray` The labels of the data. pred : `NDArray` Predicted values. """ ...
Update various binary classification counts for a single (label, pred) pair. Parameters ---------- label : `NDArray` The labels of the data. pred : `NDArray` Predicted values.
def solar_midnight(self, date=None, local=True): """Calculates the solar midnight (the time when the sun is at its lowest point.) :param date: The date for which to calculate the midnight time. If no date is specified then the current date will be used. :type date: ...
Calculates the solar midnight (the time when the sun is at its lowest point.) :param date: The date for which to calculate the midnight time. If no date is specified then the current date will be used. :type date: :class:`~datetime.date` :param local: True = Time...
def display_hook(fn): """ A decorator to wrap display hooks that return a MIME bundle or None. Additionally it handles adding output to the notebook archive, saves files specified with the output magic and handles tracebacks. """ @wraps(fn) def wrapped(element): global FULL_TRACEBACK...
A decorator to wrap display hooks that return a MIME bundle or None. Additionally it handles adding output to the notebook archive, saves files specified with the output magic and handles tracebacks.
def kelvin2rgb(temperature): """ Converts from Kelvin temperature to an RGB color. Algorithm credits: |tannerhelland|_ """ # range check if temperature < 1000: temperature = 1000 elif temperature > 40000: temperature = 40000 tmp_internal = temperature / 100.0 # red...
Converts from Kelvin temperature to an RGB color. Algorithm credits: |tannerhelland|_
def store_minions(opts, jid, minions, mminion=None, syndic_id=None): ''' Store additional minions matched on lower-level masters using the configured master_job_cache ''' if mminion is None: mminion = salt.minion.MasterMinion(opts, states=False, rend=False) job_cache = opts['master_job_c...
Store additional minions matched on lower-level masters using the configured master_job_cache
def init_app(self, app, **kwargs): """Initialize application object. :param app: An instance of :class:`~flask.Flask`. """ # Init the configuration self.init_config(app) # Enable Rate limiter self.limiter = Limiter(app, key_func=get_ipaddr) # Enable secur...
Initialize application object. :param app: An instance of :class:`~flask.Flask`.
def _step(self, model: TrainingModel, batch: mx.io.DataBatch, checkpoint_interval: int, metric_train: mx.metric.EvalMetric, metric_loss: Optional[mx.metric.EvalMetric] = None): """ Performs an update to model given a batch and updates...
Performs an update to model given a batch and updates metrics.
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting term. Fault type (Strike-slip, Normal, Thrust/reverse) is derived from rake angle. Rakes angles within 30 of horizontal are strike-slip, angles from 30 to 150 are reverse, and angles from ...
Returns the style-of-faulting term. Fault type (Strike-slip, Normal, Thrust/reverse) is derived from rake angle. Rakes angles within 30 of horizontal are strike-slip, angles from 30 to 150 are reverse, and angles from -30 to -150 are normal. Note that the 'Unspecified' ca...
def execute_python_script(self, script): """ Execute a python script of the remote server :param script: Inline script to convert to a file and execute remotely :return: The output of the script execution """ # Create the local file to copy to remote file_handle,...
Execute a python script of the remote server :param script: Inline script to convert to a file and execute remotely :return: The output of the script execution
def __Restore_Geometry_On_Layout_Change_checkBox_set_ui(self): """ Sets the **Restore_Geometry_On_Layout_Change_checkBox** Widget. """ # Adding settings key if it doesn't exists. self.__settings.get_key("Settings", "restore_geometry_on_layout_change").isNull() and \ self...
Sets the **Restore_Geometry_On_Layout_Change_checkBox** Widget.
def nth(iterable, n, default=None): """Returns the nth item or a default value Arguments --------- iterable : iterable n : int default : default=None The default value to return """ if type(n) != int: raise TypeError("n is not an ...
Returns the nth item or a default value Arguments --------- iterable : iterable n : int default : default=None The default value to return
def read_asc_grid(filename, footer=0): """Reads ASCII grid file (*.asc). Parameters ---------- filename : str Name of *.asc file. footer : int, optional Number of lines at bottom of *.asc file to skip. Returns ------- grid_array : numpy array, shape (M, N) ...
Reads ASCII grid file (*.asc). Parameters ---------- filename : str Name of *.asc file. footer : int, optional Number of lines at bottom of *.asc file to skip. Returns ------- grid_array : numpy array, shape (M, N) (M, N) array of grid values, where M is...
def FormatAsHexString(num, width=None, prefix="0x"):
  """Takes an int and returns the number formatted as a hex string.

  Args:
    num: Integer to format.
    width: Optional minimum number of hex digits; zero-padded on the left.
    prefix: String placed before the hex digits (after the sign, if any).

  Returns:
    The formatted string, e.g. FormatAsHexString(255, 4) -> "0x00ff".
  """
  # The original hex(num)[2:] slicing produced garbage for negative numbers
  # ("0xx5" for -5) and stripped a Python-2 "L" suffix that no longer exists.
  sign = "-" if num < 0 else ""
  hex_str = format(abs(num), "x")
  if width:
    hex_str = hex_str.rjust(width, "0")
  return "%s%s%s" % (sign, prefix, hex_str)
Takes an int and returns the number formatted as a hex string.
def marketShortInterestDF(date=None, token='', version=''): '''The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report. The report data will be published daily at 4:00pm ET. https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev ...
The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report. The report data will be published daily at 4:00pm ET. https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev Args: date (datetime); Effective Datetime toke...
def import_command(dest, src, name, api=None, filter_symbol=None): """Import Command `name` and its dependencies from Registry `src` to Registry `dest` :param Registry dest: Destination Registry :param Registry src: Source Registry :param str name: Name of Command to import :param str api: Pref...
Import Command `name` and its dependencies from Registry `src` to Registry `dest` :param Registry dest: Destination Registry :param Registry src: Source Registry :param str name: Name of Command to import :param str api: Prefer to import Types with api name `api`, or None to imp...
def _login(login_func, *args):
    """Shared login helper that avoids duplicating this flow across the
    individual login functions.

    Runs *login_func* with *args*, fails fast on an error response, and
    builds a User from the response JSON.
    """
    response = login_func(*args)
    _fail_if_contains_errors(response)
    return User(response.json())
A helper function for logging in. It's purpose is to avoid duplicate code in the login functions.
def get_energy_management_properties(self): """ Return the energy management properties of the CPC. The returned energy management properties are a subset of the properties of the CPC resource, and are also available as normal properties of the CPC resource. In so far, there is ...
Return the energy management properties of the CPC. The returned energy management properties are a subset of the properties of the CPC resource, and are also available as normal properties of the CPC resource. In so far, there is no new data provided by this method. However, because on...
def uninstall_wic(self, wic_slot_number): """ Uninstalls a WIC adapter from this router. :param wic_slot_number: WIC slot number """ # WICs are always installed on adapters in slot 0 slot_number = 0 # Do not check if slot has an adapter because adapters with WI...
Uninstalls a WIC adapter from this router. :param wic_slot_number: WIC slot number
def compute_summary_statistic(iscs, summary_statistic='mean', axis=None): """Computes summary statistics for ISCs Computes either the 'mean' or 'median' across a set of ISCs. In the case of the mean, ISC values are first Fisher Z transformed (arctanh), averaged, then inverse Fisher Z transformed (tanh...
Computes summary statistics for ISCs Computes either the 'mean' or 'median' across a set of ISCs. In the case of the mean, ISC values are first Fisher Z transformed (arctanh), averaged, then inverse Fisher Z transformed (tanh). The implementation is based on the work in [SilverDunlap1987]_. .. [S...
def list_product_releases(page_size=200, page_index=0, sort="", q=""):
    """List all ProductReleases.

    Returns the formatted JSON list, or None when the raw query yields
    nothing truthy.
    """
    raw = list_product_releases_raw(page_size, page_index, sort, q)
    return utils.format_json_list(raw) if raw else None
List all ProductReleases
def path_helper(self, path=None, operations=None, **kwargs): """ Works like a apispec plugin May return a path as string and mutate operations dict. :param str path: Path to the resource :param dict operations: A `dict` mapping HTTP methods to operation object. See ...
Works like a apispec plugin May return a path as string and mutate operations dict. :param str path: Path to the resource :param dict operations: A `dict` mapping HTTP methods to operation object. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#ope...
def set_xylims(self, lims, axes=None, panel=None, **kw):
    """Set the x/y limits on a panel.

    :param lims: limits to apply, forwarded to the panel's set_xylims
    :param axes: optional axes selector, forwarded to the panel
    :param panel: panel key; defaults to the current panel
    :param kw: extra keyword arguments forwarded to the panel
    """
    # BUG FIX: the original body forwarded **kw but the signature did not
    # accept it, raising NameError on every call. Accepting **kw here is
    # backward-compatible and makes the forwarding work as intended.
    if panel is None:
        panel = self.current_panel
    self.panels[panel].set_xylims(lims, axes=axes, **kw)
overwrite data for trace t
def blockSignals( self, state ): """ Blocks the signals for this widget and its sub-parts. :param state | <bool> """ super(XLocationWidget, self).blockSignals(state) self._locationEdit.blockSignals(state) self._locationButton.blockSignals(sta...
Blocks the signals for this widget and its sub-parts. :param state | <bool>
def add_alias(self, alias, source, clean=True): """Add an alias, optionally 'cleaning' the alias string. Calls the parent `catalog` method `clean_entry_name` - to apply the same name-cleaning as is applied to entry names themselves. Returns ------- alias : str ...
Add an alias, optionally 'cleaning' the alias string. Calls the parent `catalog` method `clean_entry_name` - to apply the same name-cleaning as is applied to entry names themselves. Returns ------- alias : str The stored version of the alias (cleaned or not).
def get_unique_together_validators(self): """ Determine a default set of validators for any unique_together contraints. """ model_class_inheritance_tree = ( [self.Meta.model] + list(self.Meta.model._meta.parents.keys()) ) # The field names we're p...
Determine a default set of validators for any unique_together contraints.
def add_mandates(self, representative, rep_json): ''' Create mandates from rep data based on variant configuration ''' # Mandate in country group for party constituency if rep_json.get('parti_ratt_financier'): constituency, _ = Constituency.objects.get_or_create( ...
Create mandates from rep data based on variant configuration
def reduce_after(method):
    '''reduce() the result of this method call (unless you already reduced it).'''
    import functools  # local import keeps this block self-contained

    # FIX: without functools.wraps the decorated method loses its __name__
    # and __doc__, which breaks introspection and documentation tooling.
    @functools.wraps(method)
    def new_method(self, *args, **kwargs):
        result = method(self, *args, **kwargs)
        # A result that compares equal to self is already reduced; pass it
        # through untouched instead of reducing again.
        if result == self:
            return result
        return result.reduce()
    return new_method
reduce() the result of this method call (unless you already reduced it).
def get_first_and_last(year, month): """Returns two datetimes: first day and last day of given year&month""" ym_first = make_aware( datetime.datetime(year, month, 1), get_default_timezone() ) ym_last = make_aware( datetime.datetime(year, month, monthra...
Returns two datetimes: first day and last day of given year&month
def with_json_path(self, path, field=None): """Annotate Storage objects with a specific JSON path. :param path: Path to get inside the stored object, which can be either a list of path components or a comma-separated string :param field: Optional output field name ...
Annotate Storage objects with a specific JSON path. :param path: Path to get inside the stored object, which can be either a list of path components or a comma-separated string :param field: Optional output field name
def resize(self, new_size):
    """Grow the backing array to *new_size* chunks, copying existing data.

    Only growth is supported; shrinking trips the assertion.
    """
    assert new_size > self.size
    grown = self._allocate(new_size)
    # Existing contents occupy the first size * chunk_size slots.
    used = self.size * self.chunk_size
    grown[0:used] = self.data
    self.size = new_size
    self.data = grown
Create a new larger array, and copy data over
def members(self, is_manager=None): """ Retrieve members of the scope. :param is_manager: (optional) set to True to return only Scope members that are also managers. :type is_manager: bool :return: List of members (usernames) Examples -------- >>> member...
Retrieve members of the scope. :param is_manager: (optional) set to True to return only Scope members that are also managers. :type is_manager: bool :return: List of members (usernames) Examples -------- >>> members = project.members() >>> managers = project.mem...
def add_notification_listener(self, notification_type, notification_callback): """ Add a notification callback to the notification center. Args: notification_type: A string representing the notification type from .helpers.enums.NotificationTypes notification_callback: closure of function to call wh...
Add a notification callback to the notification center. Args: notification_type: A string representing the notification type from .helpers.enums.NotificationTypes notification_callback: closure of function to call when event is triggered. Returns: Integer notification id used to remove the n...
def remove_user_from_group(self, username, groupname, raise_on_error=False): """Remove a user from a group Attempts to remove a user from a group Args username: The username to remove from the group. groupname: The group name to be removed from the user. Returns: ...
Remove a user from a group Attempts to remove a user from a group Args username: The username to remove from the group. groupname: The group name to be removed from the user. Returns: True: Succeeded False: If unsuccessful
def expand(self, m): """Using the template, expand the string.""" if m is None: raise ValueError("Match is None!") sep = m.string[:0] if isinstance(sep, bytes) != self._bytes: raise TypeError('Match string type does not match expander string type!') text...
Using the template, expand the string.
def os_walk_pre_35(top, topdown=True, onerror=None, followlinks=False): """Pre Python 3.5 implementation of os.walk() that doesn't use scandir.""" islink, join, isdir = os.path.islink, os.path.join, os.path.isdir try: names = os.listdir(top) except OSError as err: if onerror is not None...
Pre Python 3.5 implementation of os.walk() that doesn't use scandir.
def select(cls, dataset, selection_mask=None, **selection): """ Apply a selection to the data. """ import iris constraint = cls.select_to_constraint(dataset, selection) pre_dim_coords = [c.name() for c in dataset.data.dim_coords] indexed = cls.indexed(dataset, sel...
Apply a selection to the data.
def match_rows(rows1, rows2, key, sort_keys=True): """ Yield triples of `(value, left_rows, right_rows)` where `left_rows` and `right_rows` are lists of rows that share the same column value for *key*. This means that both *rows1* and *rows2* must have a column with the same name *key*. .. warn...
Yield triples of `(value, left_rows, right_rows)` where `left_rows` and `right_rows` are lists of rows that share the same column value for *key*. This means that both *rows1* and *rows2* must have a column with the same name *key*. .. warning:: Both *rows1* and *rows2* will exist in memory for...
def housecode_to_index(housecode): """Convert a X10 housecode to a zero-based index""" match = re.search(r'^([A-P])(\d{1,2})$', housecode.upper()) if match: house_index = int(match.group(2)) if 1 <= house_index <= 16: return (ord(match.group(1)) - ord('A')) * 16 + house_index - 1...
Convert a X10 housecode to a zero-based index
def list_vrf(self):
    """List VRFs and return the JSON encoded result.

    On a backend NipapError, returns a JSON error object instead of
    raising, so the client always receives a JSON body.
    """
    # FIX: the original used Python-2-only syntax "except NipapError, e:",
    # which is a SyntaxError on Python 3; "as" works on Python 2.6+ and 3.
    try:
        vrfs = VRF.list()
    except NipapError as e:
        return json.dumps({'error': 1, 'message': e.args,
                           'type': type(e).__name__})
    return json.dumps(vrfs, cls=NipapJSONEncoder)
List VRFs and return JSON encoded result.
def connect_mysql(host, port, user, password, database):
    """Open a pymysql connection to the given MySQL database.

    NOTE(review): the original docstring said "with retries", but no retry
    logic is implemented here -- any connection failure propagates directly
    to the caller. Confirm whether retries were intended.
    """
    return pymysql.connect(
        host=host,
        port=port,
        user=user,
        passwd=password,
        db=database
    )
Connect to MySQL with retries.
def letras(song): """ Returns the lyrics found in letras.com for the specified mp3 file or an empty string if not found. """ translate = { '&': 'a', URLESCAPE: '', ' ': '-' } artist = song.artist.lower() artist = normalize(artist, translate) title = song.title...
Returns the lyrics found in letras.com for the specified mp3 file or an empty string if not found.
def foldl1(f: Callable[[T, T], T], xs: Iterable[T]) -> T: """ Returns the accumulated result of a binary function applied to elements of an iterable. .. math:: foldl1(f, [x_0, x_1, x_2, x_3]) = f(f(f(f(x_0, x_1), x_2), x_3) Examples -------- >>> from delphi.utils.fp import foldl1 ...
Returns the accumulated result of a binary function applied to elements of an iterable. .. math:: foldl1(f, [x_0, x_1, x_2, x_3]) = f(f(f(f(x_0, x_1), x_2), x_3) Examples -------- >>> from delphi.utils.fp import foldl1 >>> foldl1(lambda x, y: x + y, range(5)) 10
def append(args): """ %prog append bamfile Append /1 or /2 to read names. Useful for using the Tophat2 bam file for training AUGUSTUS gene models. """ p = OptionParser(append.__doc__) p.add_option("--prepend", help="Prepend string to read names") opts, args = p.parse_args(args) if ...
%prog append bamfile Append /1 or /2 to read names. Useful for using the Tophat2 bam file for training AUGUSTUS gene models.
def _is_numeric_data(self, data_type): """Private method for testing text data types.""" dt = DATA_TYPES[data_type] if dt['min'] and dt['max']: if type(self.data) is dt['type'] and dt['min'] < self.data < dt['max']: self.type = data_type.upper() self.l...
Private method for testing text data types.
def missing_categories(context): ''' Adds the categories that the user does not currently have. ''' user = user_for_context(context) categories_available = set(CategoryController.available_categories(user)) items = ItemController(user).items_pending_or_purchased() categories_held = set() for p...
Adds the categories that the user does not currently have.
def load_from_rdf_file(self, rdf_file): """Initialize given an RDF input file representing the hierarchy." Parameters ---------- rdf_file : str Path to an RDF file. """ self.graph = rdflib.Graph() self.graph.parse(os.path.abspath(rdf_file), format='nt...
Initialize given an RDF input file representing the hierarchy." Parameters ---------- rdf_file : str Path to an RDF file.
def rank(self, n, mu, sigma, crit=.5, upper=10000, xtol=1):
    # NOTE(review): the docstring below is a template -- "%(super)s" and
    # "{0}" are presumably substituted at runtime by a decorator or
    # metaclass. Do not rewrite it as plain prose without confirming.
    """%(super)s

    Additional Parameters
    ----------------------
    {0}
    """
    # Thin wrapper: all work is delegated to the module-level _make_rank.
    return _make_rank(self, n, mu, sigma, crit=crit, upper=upper, xtol=xtol)
%(super)s Additional Parameters ---------------------- {0}
def save(self, session_file, verbose=False): """ Saves the current session to an existing file, which will be replaced. If this is a new session that has not been saved yet, use 'save as' instead. :param session_file: The path to the file where the current session must b...
Saves the current session to an existing file, which will be replaced. If this is a new session that has not been saved yet, use 'save as' instead. :param session_file: The path to the file where the current session must be saved to. :param verbose: print more
def authentication(self): """Generate authentication string.""" if self.session.digest: authentication = self.session.generate_digest() elif self.session.basic: authentication = self.session.generate_basic() else: return '' return "Authorizatio...
Generate authentication string.
def make_cache_keys(self, endpoint, kwargs): """ This function is built to provide cache keys for templates :param endpoint: Current endpoint :param kwargs: Keyword Arguments :return: tuple of i18n dependant cache key and i18n ignoring cache key :rtype: tuple(str) """ ...
This function is built to provide cache keys for templates :param endpoint: Current endpoint :param kwargs: Keyword Arguments :return: tuple of i18n dependant cache key and i18n ignoring cache key :rtype: tuple(str)
def get_future_days(self):
    """Return only Day objects whose date is today or later."""
    return Day.objects.filter(date__gte=timezone.now().date())
Return only future Day objects.
def create_training_instances(x): """Create `TrainingInstance`s from raw text.""" (input_files, out, tokenizer, max_seq_length, dupe_factor, short_seq_prob, masked_lm_prob, max_predictions_per_seq, rng) = x time_start = time.time() logging.info('Processing %s', input_files) all_documents = [[]]...
Create `TrainingInstance`s from raw text.
def select_qadapter(self, pconfs): """ Given a list of parallel configurations, pconfs, this method select an `optimal` configuration according to some criterion as well as the :class:`QueueAdapter` to use. Args: pconfs: :class:`ParalHints` object with the list of parallel c...
Given a list of parallel configurations, pconfs, this method select an `optimal` configuration according to some criterion as well as the :class:`QueueAdapter` to use. Args: pconfs: :class:`ParalHints` object with the list of parallel configurations Returns: :class:`Par...
def upload(self, project_id, processor_name, **fields): """Upload files and data objects. :param project_id: ObjectId of Genesis project :type project_id: string :param processor_name: Processor object name :type processor_name: string :param fields: Processor field-valu...
Upload files and data objects. :param project_id: ObjectId of Genesis project :type project_id: string :param processor_name: Processor object name :type processor_name: string :param fields: Processor field-value pairs :type fields: args :rtype: HTTP Response ob...
def _executable_memory_regions(self, objects=None, force_segment=False): """ Get all executable memory regions from the binaries :param objects: A collection of binary objects to collect regions from. If None, regions from all project binary objects are used. :pa...
Get all executable memory regions from the binaries :param objects: A collection of binary objects to collect regions from. If None, regions from all project binary objects are used. :param bool force_segment: Rely on binary segments instead of sections. :return: A sorte...
def _parse_persons(self, datafield, subfield, roles=["aut"]): """ Parse persons from given datafield. Args: datafield (str): code of datafield ("010", "730", etc..) subfield (char): code of subfield ("a", "z", "4", etc..) role (list of str): set to ["any"] f...
Parse persons from given datafield. Args: datafield (str): code of datafield ("010", "730", etc..) subfield (char): code of subfield ("a", "z", "4", etc..) role (list of str): set to ["any"] for any role, ["aut"] for authors, etc.. For details see ...
def getRaw(self, context, aslist=False, **kwargs): """Grab the stored value, and return it directly as UIDs. :param context: context is the object who's schema contains this field. :type context: BaseContent :param aslist: Forces a single-valued field to return a list type. :typ...
Grab the stored value, and return it directly as UIDs. :param context: context is the object who's schema contains this field. :type context: BaseContent :param aslist: Forces a single-valued field to return a list type. :type aslist: bool :param kwargs: kwargs are passed direct...
def deserialize(self, to_deserialize: PrimitiveJsonType) \ -> Optional[Union[SerializableType, List[SerializableType]]]: """ Deserializes the given representation of the serialized object. :param to_deserialize: the serialized object as a dictionary :return: the deserialized ...
Deserializes the given representation of the serialized object. :param to_deserialize: the serialized object as a dictionary :return: the deserialized object or collection of deserialized objects
def unregister_directory(self, directory_node, raise_exception=False): """ Unregisters given :class:`umbra.components.factory.script_editor.nodes.DirectoryNode` class Node from the Model. :param directory_node: DirectoryNode to unregister. :type directory_node: DirectoryNode :pa...
Unregisters given :class:`umbra.components.factory.script_editor.nodes.DirectoryNode` class Node from the Model. :param directory_node: DirectoryNode to unregister. :type directory_node: DirectoryNode :param raise_exception: Raise the exception. :type raise_exception: bool :retu...
def import_(module_name, name):
    """Import an object given a (possibly package-relative) module path::

        Profiler = import_('profiling.profiler', 'Profiler')
    """
    loaded = importlib.import_module(module_name, __package__)
    return getattr(loaded, name)
Imports an object by a relative module path:: Profiler = import_('profiling.profiler', 'Profiler')
def score(self, X, eval_metric='acc', num_batch=None, batch_end_callback=None, reset=True): """Run the model given an input and calculate the score as assessed by an evaluation metric. Parameters ---------- X : mxnet.DataIter eval_metric : metric.metric The m...
Run the model given an input and calculate the score as assessed by an evaluation metric. Parameters ---------- X : mxnet.DataIter eval_metric : metric.metric The metric for calculating score. num_batch : int or None The number of batches to run. ...
def lyric(id):
    """Fetch the lyrics of a song by its song ID via the 'LYRIC' endpoint.

    :param id: song ID
    """
    # Reject a missing ID up front instead of sending a bad request.
    if id is None:
        raise ParamsError()
    r = NCloudBot()
    r.method = 'LYRIC'
    r.params = {'id': id}
    r.send()
    return r.response
通过歌曲 ID 获取歌曲歌词地址 :param id: 歌曲ID
def get_sample_value(self, name, labels=None): """Returns the sample value, or None if not found. This is inefficient, and intended only for use in unittests. """ if labels is None: labels = {} for metric in self.collect(): for s in metric.samples: ...
Returns the sample value, or None if not found. This is inefficient, and intended only for use in unittests.
def post_predictions(self, document_id: str, model_name: str) -> dict: """Run inference and create a prediction, calls the POST /predictions endpoint. >>> from las import Client >>> client = Client(endpoint='<api endpoint>') >>> client.post_predictions(document_id='<document id>', model...
Run inference and create a prediction, calls the POST /predictions endpoint. >>> from las import Client >>> client = Client(endpoint='<api endpoint>') >>> client.post_predictions(document_id='<document id>', model_name='invoice') :param document_id: The document id to run inference and...
def get_consumed_read_units_percent( table_name, lookback_window_start=15, lookback_period=5): """ Returns the number of consumed read units in percent :type table_name: str :param table_name: Name of the DynamoDB table :type lookback_window_start: int :param lookback_window_start: Relative...
Returns the number of consumed read units in percent :type table_name: str :param table_name: Name of the DynamoDB table :type lookback_window_start: int :param lookback_window_start: Relative start time for the CloudWatch metric :type lookback_period: int :param lookback_period: Number of minu...
def document_agents(p): """ Document agents in AIKIF (purpose and intent) """ p.comment('agent.py', 'base agent class') p.comment('run_agents.py', 'Top level function to run the agents') p.comment('agent_image_metadata.py', 'agent to collect file picture metadata') p.comment('agent_lear...
Document agents in AIKIF (purpose and intent)
def insert_cylinder(im, xyz0, xyz1, r): r""" Inserts a cylinder of given radius onto a given image Parameters ---------- im : array_like Original voxelated image xyz0, xyz1 : 3-by-1 array_like Voxel coordinates of the two end points of the cylinder r : int Radius of ...
r""" Inserts a cylinder of given radius onto a given image Parameters ---------- im : array_like Original voxelated image xyz0, xyz1 : 3-by-1 array_like Voxel coordinates of the two end points of the cylinder r : int Radius of the cylinder Returns ------- im...
def update_pos(self, pos_id, name, pos_type, location=None): """Update POS resource. Returns the raw response object. Arguments: pos_id: POS id as chosen on registration name: Human-readable name of the POS, used for displaying payment ...
Update POS resource. Returns the raw response object. Arguments: pos_id: POS id as chosen on registration name: Human-readable name of the POS, used for displaying payment request origin to end user pos_type: PO...
def _subtract_timedelta(self, delta): """ Remove timedelta duration from the instance. :param delta: The timedelta instance :type delta: pendulum.Duration or datetime.timedelta :rtype: DateTime """ if isinstance(delta, pendulum.Duration): return self...
Remove timedelta duration from the instance. :param delta: The timedelta instance :type delta: pendulum.Duration or datetime.timedelta :rtype: DateTime
def LMLgrad(self,params=None): """ evaluates the gradient of the log marginal likelihood for the given hyperparameters """ if params is not None: self.setParams(params) KV = self._update_cache() W = KV['W'] LMLgrad = SP.zeros(self.covar.n_params) ...
evaluates the gradient of the log marginal likelihood for the given hyperparameters
def grants(self): """ Returns grants for the current user """ from linode_api4.objects.account import UserGrants resp = self._client.get('/profile/grants') # use special endpoint for restricted users grants = None if resp is not None: # if resp is Non...
Returns grants for the current user
def get_function_doc(function, config=default_config):
    """Return doc for a function, or None when it is excluded by config."""
    exclusions = config.exclude_function
    # Suppress documentation for any function whose name matches one of the
    # configured exclusion patterns.
    if exclusions and any(pattern.match(function.__name__) for pattern in exclusions):
        return None
    return _doc_object(function, 'function', config=config)
Return doc for a function.
def has_valid_dispatch_view_docs(endpoint):
    """Return a truthy value if the endpoint's ``dispatch_request`` is swaggable.

    The endpoint must carry a ``view_class`` whose ``dispatch_request`` method
    has a docstring, and the endpoint itself must expose ``methods``.
    """
    view_cls = endpoint.__dict__.get('view_class', None)
    if not view_cls:
        # Preserve the original falsy value (usually None) for absent classes.
        return view_cls
    if not (hasattr(view_cls, 'dispatch_request') and hasattr(endpoint, 'methods')):
        return False
    # Truthy only when the dispatch method actually carries documentation.
    return view_cls.dispatch_request.__doc__
Return True if dispatch_request is swaggable
def getRow(leftU, rightV, jVec): ''' Compute X_{\geq \mu}^T \otimes X_{leq \mu} X_{\geq \mu} = V_{\mu+1}(j_{\mu}) \ldots V_{d} (j_{d}) [left interface matrix] X_{\leq \mu} = U_{1} (j_{1}) \ldots U_{\mu-1}(j_{\mu-1}) [right interface matrix] Parameters: :list of numpy.arrays: leftU ...
Compute X_{\geq \mu}^T \otimes X_{leq \mu} X_{\geq \mu} = V_{\mu+1}(j_{\mu}) \ldots V_{d} (j_{d}) [left interface matrix] X_{\leq \mu} = U_{1} (j_{1}) \ldots U_{\mu-1}(j_{\mu-1}) [right interface matrix] Parameters: :list of numpy.arrays: leftU left-orthogonal cores from 1 to \mu-1 ...
def export(self, input_stats=None): """Export all the stats. Each export module is ran in a dedicated thread. """ # threads = [] input_stats = input_stats or {} for e in self._exports: logger.debug("Export stats using the %s module" % e) thread =...
Export all the stats. Each export module is ran in a dedicated thread.
def t_COMMA(self, t):
    r','
    # NOTE(review): the raw string above is not documentation — this looks like
    # a PLY lexer rule, where the function's docstring is the token's regular
    # expression; do not edit it. TODO: confirm this class feeds ply.lex.
    # Record where the token ends so downstream consumers get exact spans.
    t.endlexpos = t.lexpos + len(t.value)
    return t
r','
def client(self):
    """Lazily create, cache, and return the client session object."""
    session = self._client
    if session is None:
        # First access: build the session once and memoize it on the instance.
        session = get_session(self.user_agent)
        self._client = session
    return session
Returns client session object
def from_xyz_string(xyz_string): """ Args: xyz_string: string of the form 'x, y, z', '-x, -y, z', '-2y+1/2, 3x+1/2, z-y+1/2', etc. Returns: SymmOp """ rot_matrix = np.zeros((3, 3)) trans = np.zeros(3) toks = xyz_string.strip...
Args: xyz_string: string of the form 'x, y, z', '-x, -y, z', '-2y+1/2, 3x+1/2, z-y+1/2', etc. Returns: SymmOp
def _validate_namespace(self, namespace): """ Validate whether a CIM namespace exists in the mock repository. Parameters: namespace (:term:`string`): The name of the CIM namespace in the mock repository. Must not be `None`. Raises: :exc:`~p...
Validate whether a CIM namespace exists in the mock repository. Parameters: namespace (:term:`string`): The name of the CIM namespace in the mock repository. Must not be `None`. Raises: :exc:`~pywbem.CIMError`: CIM_ERR_INVALID_NAMESPACE: Namespace does ...
def main(): """Command-Mode: Retrieve and display data then process commands.""" (cred, providers) = config_read() cmd_mode = True conn_objs = cld.get_conns(cred, providers) while cmd_mode: nodes = cld.get_data(conn_objs, providers) node_dict = make_node_dict(nodes, "name") i...
Command-Mode: Retrieve and display data then process commands.
def result(self, value):
    """Store the command's result, applying the post-processor when configured."""
    processor = self._process_result
    if processor:
        # Processed value first, so a processor failure leaves both unset.
        self._result = processor(value)
    self._raw_result = value
The result of the command.
def get_parameter(self, parameter):
    """Return the stored dict for the given parameter."""
    # Normalize the caller-supplied name before looking it up.
    key = self._get_parameter_name(parameter)
    return self._parameters[key]
Return a dict for given parameter
def __expect(self, exp='> ', timeout=None): """will wait for exp to be returned from nodemcu or timeout""" timeout_before = self._port.timeout timeout = timeout or self._timeout #do NOT set timeout on Windows if SYSTEM != 'Windows': # Checking for new data every 100us...
will wait for exp to be returned from nodemcu or timeout
def set_fig_y_label(self, ylabel, **kwargs): """Set overall figure y. Set label for y axis on overall figure. This is not for a specific plot. It will place the label on the figure at the left with a call to ``fig.text``. Args: ylabel (str): ylabel for entire figure. ...
Set overall figure y. Set label for y axis on overall figure. This is not for a specific plot. It will place the label on the figure at the left with a call to ``fig.text``. Args: ylabel (str): ylabel for entire figure. Keyword Arguments: x/y (float, optional):...
def remove(self, document_id, namespace, timestamp): """Removes document from Mongo The input is a python dictionary that represents a mongo document. The documents has ns and _ts fields. """ database, coll = self._db_and_collection(namespace) meta_collection = self._ge...
Removes document from Mongo The input is a python dictionary that represents a mongo document. The documents has ns and _ts fields.