code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def create_dimension(ncfile, name, length) -> None: """Add a new dimension with the given name and length to the given NetCDF file. Essentially, |create_dimension| just calls the equally named method of the NetCDF library, but adds information to possible error messages: >>> from hydpy import Test...
Add a new dimension with the given name and length to the given NetCDF file. Essentially, |create_dimension| just calls the equally named method of the NetCDF library, but adds information to possible error messages: >>> from hydpy import TestIO >>> from hydpy.core.netcdftools import netcdf4 >...
def showtraceback(self, *args, **kwargs): """Display the exception that just occurred.""" # Override for avoid using sys.excepthook PY-12600 try: type, value, tb = sys.exc_info() sys.last_type = type sys.last_value = value sys.last_traceback = tb ...
Display the exception that just occurred.
def _scalar_field_to_json(field, row_value): """Maps a field and value to a JSON-safe value. Args: field ( \ :class:`~google.cloud.bigquery.schema.SchemaField`, \ ): The SchemaField to use for type conversion and field name. row_value (any): Value to ...
Maps a field and value to a JSON-safe value. Args: field ( \ :class:`~google.cloud.bigquery.schema.SchemaField`, \ ): The SchemaField to use for type conversion and field name. row_value (any): Value to be converted, based on the field's type. Return...
def requires(self): """ This task's dependencies: * :py:class:`~.AggregateArtists` or * :py:class:`~.AggregateArtistsSpark` if :py:attr:`~/.Top10Artists.use_spark` is set. :return: object (:py:class:`luigi.task.Task`) """ if self.use_spark: return Ag...
This task's dependencies: * :py:class:`~.AggregateArtists` or * :py:class:`~.AggregateArtistsSpark` if :py:attr:`~/.Top10Artists.use_spark` is set. :return: object (:py:class:`luigi.task.Task`)
def song(self, song_id): """Get information about a song. Parameters: song_id (str): A song ID. Returns: dict: Song information. """ if song_id.startswith('T'): song_info = self._call( mc_calls.FetchTrack, song_id ).body else: song_info = next( ( song for song in self...
Get information about a song. Parameters: song_id (str): A song ID. Returns: dict: Song information.
def calc_and_plot_sample_orient_check(self): """ If sample orientation is on plots the wrong arrow, wrong compass, and rotated sample error directions for the current specimen interpretation on the high level mean plot so that you can check sample orientation good/bad. ""...
If sample orientation is on plots the wrong arrow, wrong compass, and rotated sample error directions for the current specimen interpretation on the high level mean plot so that you can check sample orientation good/bad.
def _get_content(data, which_content): """ get the content that could be hidden in the middle of "content" or "summary detail" from the data of the provider """ content = '' if data.get(which_content): if isinstance(data.get(which_content),...
get the content that could be hidden in the middle of "content" or "summary detail" from the data of the provider
def maybe_download(url, filename): """Download the data from Yann's website, unless it's already here.""" if not os.path.exists(WORK_DIRECTORY): os.mkdir(WORK_DIRECTORY) filepath = os.path.join(WORK_DIRECTORY, filename) if not os.path.exists(filepath): filepath, _ = request.urlretrieve(url + filename, f...
Download the data from Yann's website, unless it's already here.
def drp_load_data(package, data, confclass=None): """Load the DRPS from data.""" drpdict = yaml.safe_load(data) ins = load_instrument(package, drpdict, confclass=confclass) if ins.version == 'undefined': pkg = importlib.import_module(package) ins.version = getattr(pkg, '__version__', 'un...
Load the DRPS from data.
def get_plot(self, normalize_rxn_coordinate=True, label_barrier=True): """ Returns the NEB plot. Uses Henkelman's approach of spline fitting each section of the reaction path based on tangent force and energies. Args: normalize_rxn_coordinate (bool): Whether to normalize the...
Returns the NEB plot. Uses Henkelman's approach of spline fitting each section of the reaction path based on tangent force and energies. Args: normalize_rxn_coordinate (bool): Whether to normalize the reaction coordinate to between 0 and 1. Defaults to True. labe...
def read_string_from_file(path, encoding="utf8"): """ Read entire contents of file into a string. """ with codecs.open(path, "rb", encoding=encoding) as f: value = f.read() return value
Read entire contents of file into a string.
def add_role(self, role, term, start_date=None, end_date=None, **kwargs): """ Examples: leg.add_role('member', term='2009', chamber='upper', party='Republican', district='10th') """ self['roles'].append(dict(role=role, term=term, ...
Examples: leg.add_role('member', term='2009', chamber='upper', party='Republican', district='10th')
def press(self): ''' press key via name or key code. Supported key name includes: home, back, left, right, up, down, center, menu, search, enter, delete(or del), recent(recent apps), volume_up, volume_down, volume_mute, camera, power. Usage: d.press.back() # pres...
press key via name or key code. Supported key name includes: home, back, left, right, up, down, center, menu, search, enter, delete(or del), recent(recent apps), volume_up, volume_down, volume_mute, camera, power. Usage: d.press.back() # press back key d.press.menu() # ...
def _copy_old_features(new_eopatch, old_eopatch, copy_features): """ Copy features from old EOPatch :param new_eopatch: New EOPatch container where the old features will be copied to :type new_eopatch: EOPatch :param old_eopatch: Old EOPatch container where the old features are located ...
Copy features from old EOPatch :param new_eopatch: New EOPatch container where the old features will be copied to :type new_eopatch: EOPatch :param old_eopatch: Old EOPatch container where the old features are located :type old_eopatch: EOPatch :param copy_features: List of tupl...
def OSCBlob(next): """Convert a string into an OSC Blob, returning a (typetag, data) tuple.""" if type(next) == type(""): length = len(next) padded = math.ceil((len(next)) / 4.0) * 4 binary = struct.pack(">i%ds" % (padded), length, next) tag = 'b' else: tag ...
Convert a string into an OSC Blob, returning a (typetag, data) tuple.
def _create_deployment_object(self, job_name, job_image, deployment_name, port=80, replicas=1, cmd_string=None, engine_json_file='~/.ipython/profile_default/security/ipcontroller-engin...
Create a kubernetes deployment for the job. Args: - job_name (string) : Name of the job and deployment - job_image (string) : Docker image to launch KWargs: - port (integer) : Container port - replicas : Number of replica containers to maintain ...
def analyze_dir(stats, parent_dir, rel_filepaths, cover_filename, *, ignore_existing=False): """ Analyze a directory (non recursively) to get its album metadata if it is one. """ no_metadata = None, None, None metadata = no_metadata audio_filepaths = [] for rel_filepath in rel_filepaths: stats["files"] +=...
Analyze a directory (non recursively) to get its album metadata if it is one.
def null_concept(self): """Return the null concept of this subsystem. The null concept is a point in concept space identified with the unconstrained cause and effect repertoire of this subsystem. """ # Unconstrained cause repertoire. cause_repertoire = self.cause_reperto...
Return the null concept of this subsystem. The null concept is a point in concept space identified with the unconstrained cause and effect repertoire of this subsystem.
def find_usb_device_by_address(self, name): """Searches for a USB device with the given host address. :py:func:`IUSBDevice.address` in name of type str Address of the USB device (as assigned by the host) to search for. return device o...
Searches for a USB device with the given host address. :py:func:`IUSBDevice.address` in name of type str Address of the USB device (as assigned by the host) to search for. return device of type :class:`IHostUSBDevice` Found USB de...
def first_setup(self): """This is a guess of the meaning of this value.""" if ATTR_FIRST_SETUP not in self.raw: return None return datetime.utcfromtimestamp(self.raw[ATTR_FIRST_SETUP])
This is a guess of the meaning of this value.
def get_osdp(self, id_or_uri): """ Retrieves facts about Server Profiles and Server Profile Templates that are using Deployment Plan based on the ID or URI provided. Args: id_or_uri: ID or URI of the Deployment Plan. Returns: dict: Server Profiles and Server Pro...
Retrieves facts about Server Profiles and Server Profile Templates that are using Deployment Plan based on the ID or URI provided. Args: id_or_uri: ID or URI of the Deployment Plan. Returns: dict: Server Profiles and Server Profile Templates
def set_prefix(self, elt, pyobj): '''use this method to set the prefix of the QName, method looks in DOM to find prefix or set new prefix. This method must be called before get_formatted_content. ''' if isinstance(pyobj, tuple): namespaceURI,localName = pyobj ...
use this method to set the prefix of the QName, method looks in DOM to find prefix or set new prefix. This method must be called before get_formatted_content.
def get_index_text(self, modname, name_cls): """Return index entry text based on object type.""" if self.objtype in ('class', 'record'): if not modname: return _('%s (built-in %s)') % (name_cls[0], self.objtype) return _('%s (%s in %s)') % (name_cls[0], self.objty...
Return index entry text based on object type.
def ability(cls, id_, name, function_type, ability_id, general_id=0): """Define a function represented as a game ability.""" assert function_type in ABILITY_FUNCTIONS return cls(id_, name, ability_id, general_id, function_type, FUNCTION_TYPES[function_type], None)
Define a function represented as a game ability.
def get_external_command_output(command: str) -> bytes: """ Takes a command-line command, executes it, and returns its ``stdout`` output. Args: command: command string Returns: output from the command as ``bytes`` """ args = shlex.split(command) ret = subprocess.check_...
Takes a command-line command, executes it, and returns its ``stdout`` output. Args: command: command string Returns: output from the command as ``bytes``
def _create_ids(self, home_teams, away_teams): """ Creates IDs for both players/teams """ categories = pd.Categorical(np.append(home_teams,away_teams)) home_id, away_id = categories.codes[0:int(len(categories)/2)], categories.codes[int(len(categories)/2):len(categories)+1] ...
Creates IDs for both players/teams
def from_int(data): """ :params data: integer :returns: proquint made from input data :type data: int :rtype: string """ if not isinstance(data, int) and not isinstance(data, long): raise TypeError('Input must be integer') res = [] while data > 0 or not res: for j in...
:params data: integer :returns: proquint made from input data :type data: int :rtype: string
def align_file_position(f, size): """ Align the position in the file to the next block of specified size """ align = (size - 1) - (f.tell() % size) f.seek(align, 1)
Align the position in the file to the next block of specified size
def mstmap(args): """ %prog mstmap LMD50.snps.genotype.txt Convert LMDs to MSTMAP input. """ from jcvi.assembly.geneticmap import MSTMatrix p = OptionParser(mstmap.__doc__) p.add_option("--population_type", default="RIL6", help="Type of population, possible values are DH a...
%prog mstmap LMD50.snps.genotype.txt Convert LMDs to MSTMAP input.
def from_dict(cls, d): """ Restores an object state from a dictionary, used in de-JSONification. :param d: the object dictionary :type d: dict :return: the object :rtype: object """ conf = {} for k in d["config"]: v = d["config"][k] ...
Restores an object state from a dictionary, used in de-JSONification. :param d: the object dictionary :type d: dict :return: the object :rtype: object
def convert_reshape(net, node, module, builder): """Converts a reshape layer from mxnet to coreml. This doesn't currently handle the deprecated parameters for the reshape layer. Parameters ---------- net: network An mxnet network object. node: layer Node to convert. modul...
Converts a reshape layer from mxnet to coreml. This doesn't currently handle the deprecated parameters for the reshape layer. Parameters ---------- net: network An mxnet network object. node: layer Node to convert. module: module A module for MXNet builder: Neura...
def get_user(self, username): """ Given the verified username, look up and return the corresponding user account if it exists, or raising ``ActivationError`` if it doesn't. """ User = get_user_model() try: user = User.objects.get(**{ U...
Given the verified username, look up and return the corresponding user account if it exists, or raising ``ActivationError`` if it doesn't.
def headerData(self, section, orientation, role=Qt.DisplayRole): """ Reimplements the :meth:`QAbstractItemModel.headerData` method. :param section: Section. :type section: int :param orientation: Orientation. ( Qt.Orientation ) :param role: Role. :type role: int ...
Reimplements the :meth:`QAbstractItemModel.headerData` method. :param section: Section. :type section: int :param orientation: Orientation. ( Qt.Orientation ) :param role: Role. :type role: int :return: Header data. :rtype: QVariant
def intersection(self, *args): '''Returns the intersection of the values whose keys are in *args. If *args is blank, returns the intersection of all values. ''' values = self.values() if args: values = [val for key,val in self.items() if key in args] return set(reduc...
Returns the intersection of the values whose keys are in *args. If *args is blank, returns the intersection of all values.
def total_supply(self, block_identifier='latest'): """ Return the total supply of the token at the given block identifier. """ return self.proxy.contract.functions.totalSupply().call(block_identifier=block_identifier)
Return the total supply of the token at the given block identifier.
def id_generator(size=15, random_state=None): """Helper function to generate random div ids. This is useful for embedding HTML into ipython notebooks.""" chars = list(string.ascii_uppercase + string.digits) return ''.join(random_state.choice(chars, size, replace=True))
Helper function to generate random div ids. This is useful for embedding HTML into ipython notebooks.
def encode(data, scheme=None, size=None): """ Encodes `data` in a DataMatrix image. For now bpp is the libdmtx default which is 24 Args: data: bytes instance scheme: encoding scheme - one of `ENCODING_SCHEME_NAMES`, or `None`. If `None`, defaults to 'Ascii'. size: i...
Encodes `data` in a DataMatrix image. For now bpp is the libdmtx default which is 24 Args: data: bytes instance scheme: encoding scheme - one of `ENCODING_SCHEME_NAMES`, or `None`. If `None`, defaults to 'Ascii'. size: image dimensions - one of `ENCODING_SIZE_NAMES`, or `No...
def _calc_min_size(self, conv_layers): """Calculates the minimum size of the input layer. Given a set of convolutional layers, calculate the minimum value of the `input_height` and `input_width`, i.e. such that the output has size 1x1. Assumes snt.VALID padding. Args: conv_layers: List of tu...
Calculates the minimum size of the input layer. Given a set of convolutional layers, calculate the minimum value of the `input_height` and `input_width`, i.e. such that the output has size 1x1. Assumes snt.VALID padding. Args: conv_layers: List of tuples `(output_channels, (kernel_size, stride),...
def getnames(): """ get mail names """ namestring = "" addmore = 1 while addmore: scientist = input("Enter name - <Return> when done ") if scientist != "": namestring = namestring + ":" + scientist else: namestring = namestring[1:] ad...
get mail names
def substitute_selected_state(state, as_template=False, keep_name=False): """ Substitute the selected state with the handed state :param rafcon.core.states.state.State state: A state of any functional type that derives from State :param bool as_template: The flag determines if a handed the state of type Li...
Substitute the selected state with the handed state :param rafcon.core.states.state.State state: A state of any functional type that derives from State :param bool as_template: The flag determines if a handed the state of type LibraryState is insert as template :return:
def _cryptodome_encrypt(cipher_factory, plaintext, key, iv): """Use a Pycryptodome cipher factory to encrypt data. :param cipher_factory: Factory callable that builds a Pycryptodome Cipher instance based on the key and IV :type cipher_factory: callable :param bytes plaintext: Plaintext data to ...
Use a Pycryptodome cipher factory to encrypt data. :param cipher_factory: Factory callable that builds a Pycryptodome Cipher instance based on the key and IV :type cipher_factory: callable :param bytes plaintext: Plaintext data to encrypt :param bytes key: Encryption key :param bytes IV: In...
def export_organizations(self, outfile): """Export organizations information to a file. The method exports information related to organizations, to the given 'outfile' output file. :param outfile: destination file object """ exporter = SortingHatOrganizationsExporter(se...
Export organizations information to a file. The method exports information related to organizations, to the given 'outfile' output file. :param outfile: destination file object
def switch_region(request, region_name, redirect_field_name=auth.REDIRECT_FIELD_NAME): """Switches the user's region for all services except Identity service. The region will be switched if the given region is one of the regions available for the scoped project. Otherwise the region is no...
Switches the user's region for all services except Identity service. The region will be switched if the given region is one of the regions available for the scoped project. Otherwise the region is not switched.
def drawpoint(self, x, y, colour = None): """ Most elementary drawing, single pixel, used mainly for testing purposes. Coordinates are those of your initial image ! """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) ...
Most elementary drawing, single pixel, used mainly for testing purposes. Coordinates are those of your initial image !
def _run_snpeff(snp_in, out_format, data): """Run effects prediction with snpEff, skipping if snpEff database not present. """ snpeff_db, datadir = get_db(data) if not snpeff_db: return None, None assert os.path.exists(os.path.join(datadir, snpeff_db)), \ "Did not find %s snpEff gen...
Run effects prediction with snpEff, skipping if snpEff database not present.
def search(self, query, limit=None): """Use reddit's search function. Returns :class:`things.Listing` object. URL: ``http://www.reddit.com/search/?q=<query>&limit=<limit>`` :param query: query string :param limit: max number of results to get """ return...
Use reddit's search function. Returns :class:`things.Listing` object. URL: ``http://www.reddit.com/search/?q=<query>&limit=<limit>`` :param query: query string :param limit: max number of results to get
def make_store(name, min_length=4, **kwargs): """\ Creates a store with a reasonable keygen. .. deprecated:: 2.0.0 Instantiate stores directly e.g. ``shorten.MemoryStore(min_length=4)`` """ if name not in stores: raise ValueError('valid stores are {0}'.format(', '.join(stores))) if n...
\ Creates a store with a reasonable keygen. .. deprecated:: 2.0.0 Instantiate stores directly e.g. ``shorten.MemoryStore(min_length=4)``
def register_memory(): """Register an approximation of memory used by FTP server process and all of its children. """ # XXX How to get a reliable representation of memory being used is # not clear. (rss - shared) seems kind of ok but we might also use # the private working set via get_memory_map...
Register an approximation of memory used by FTP server process and all of its children.
def get_scenario(scenario_id,**kwargs): """ Get the specified scenario """ user_id = kwargs.get('user_id') scen_i = _get_scenario(scenario_id, user_id) scen_j = JSONObject(scen_i) rscen_rs = db.DBSession.query(ResourceScenario).filter(ResourceScenario.scenario_id==scenario_id).options...
Get the specified scenario
def load_services(self, services=settings.TH_SERVICES): """ get the service from the settings """ kwargs = {} for class_path in services: module_name, class_name = class_path.rsplit('.', 1) klass = import_from_path(class_path) service = kla...
get the service from the settings
def get_activity_mdata(): """Return default mdata map for Activity""" return { 'courses': { 'element_label': { 'text': 'courses', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTyp...
Return default mdata map for Activity
def _filter_choosers_alts(self, choosers, alternatives): """ Apply filters to the choosers and alts tables. """ return ( util.apply_filter_query( choosers, self.choosers_predict_filters), util.apply_filter_query( alternatives, self...
Apply filters to the choosers and alts tables.
def require_meta_and_content(self, content_handler, params, **kwargs): """Require 'meta' and 'content' dictionaries using proper hander. Args: content_handler (callable): function that accepts ``params, meta, **kwargs`` argument and returns dictionary for ``c...
Require 'meta' and 'content' dictionaries using proper hander. Args: content_handler (callable): function that accepts ``params, meta, **kwargs`` argument and returns dictionary for ``content`` response section params (dict): dictionary of parsed resource...
def wrapped_request(self, request, *args, **kwargs): """Create and send a request to the server. This method implements a very small subset of the options possible to send an request. It is provided as a shortcut to sending a simple wrapped request. Parameters ---------...
Create and send a request to the server. This method implements a very small subset of the options possible to send an request. It is provided as a shortcut to sending a simple wrapped request. Parameters ---------- request : str The request to call. ...
def compute_csets_TRAM( connectivity, state_counts, count_matrices, equilibrium_state_counts=None, ttrajs=None, dtrajs=None, bias_trajs=None, nn=None, factor=1.0, callback=None): r""" Computes the largest connected sets in the produce space of Markov state and thermodynamic states for TRAM data. ...
r""" Computes the largest connected sets in the produce space of Markov state and thermodynamic states for TRAM data. Parameters ---------- connectivity : string one of None, 'reversible_pathways', 'post_hoc_RE' or 'BAR_variance', 'neighbors', 'summed_count_matrix' or None. ...
def update_member_names(oldasndict, pydr_input): """ Update names in a member dictionary. Given an association dictionary with rootnames and a list of full file names, it will update the names in the member dictionary to contain '_*' extension. For example a rootname of 'u9600201m' will be repl...
Update names in a member dictionary. Given an association dictionary with rootnames and a list of full file names, it will update the names in the member dictionary to contain '_*' extension. For example a rootname of 'u9600201m' will be replaced by 'u9600201m_c0h' making sure that a MEf file is passed...
def project(self, x, vector): '''Project a vector (gradient or direction) on the active constraints. Arguments: | ``x`` -- The unknowns. | ``vector`` -- A numpy array with a direction or a gradient. The return value is a gradient or direction, where the components...
Project a vector (gradient or direction) on the active constraints. Arguments: | ``x`` -- The unknowns. | ``vector`` -- A numpy array with a direction or a gradient. The return value is a gradient or direction, where the components that point away from the cons...
def optimize_seq_and_branch_len(self,reuse_branch_len=True, prune_short=True, marginal_sequences=False, branch_length_mode='joint', max_iter=5, infer_gtr=False, **kwargs): """ Iteratively set branch lengths and reconstruct ancestral...
Iteratively set branch lengths and reconstruct ancestral sequences until the values of either former or latter do not change. The algorithm assumes knowing only the topology of the tree, and requires that sequences are assigned to all leaves of the tree. The first step is to pre-reconst...
def get_xy_environment(self, xy): '''Get manager address for the environment which should have the agent with given *xy* coordinate, or None if no such environment is in this multi-environment. ''' x = xy[0] y = xy[1] for origin, addr in self._slave_origins: ...
Get manager address for the environment which should have the agent with given *xy* coordinate, or None if no such environment is in this multi-environment.
async def start(self): """Connect to device and listen to incoming messages.""" if self.connection.connected: return await self.connection.connect() # In case credentials have been given externally (i.e. not by pairing # with a device), then use that client id ...
Connect to device and listen to incoming messages.
def cleanParagraph(self): """ Compress text runs, remove whitespace at start and end, skip empty blocks, etc """ runs = self.block.content if not runs: self.block = None return if not self.clean_paragraphs: return jo...
Compress text runs, remove whitespace at start and end, skip empty blocks, etc
def transform(self, X, y=None, copy=None): """ Perform standardization by centering and scaling using the parameters. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for scikit-learn ``Pipeline`` compatibility. ...
Perform standardization by centering and scaling using the parameters. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for scikit-learn ``Pipeline`` compatibility. :type y: None :param bool copy: Copy the X matrix. ...
def getresponse(self): ''' Gets the response and generates the _Response object''' status = self._httprequest.status() status_text = self._httprequest.status_text() resp_headers = self._httprequest.get_all_response_headers() fixed_headers = [] for resp_header in resp_hea...
Gets the response and generates the _Response object
def _bisect(value_and_gradients_function, initial_args, f_lim): """Actual implementation of bisect given initial_args in a _BracketResult.""" def _loop_cond(curr): # TODO(b/112524024): Also take into account max_iterations. return ~tf.reduce_all(input_tensor=curr.stopped) def _loop_body(curr): """Nar...
Actual implementation of bisect given initial_args in a _BracketResult.
def ahead(self, i, j=None): '''Raising stopiteration with end the parse. ''' if j is None: return self._stream[self.i + i] else: return self._stream[self.i + i: self.i + j]
Raising stopiteration with end the parse.
def getItemTrace(self): """Returns a node trace up to the <schema> item. """ item, path, name, ref = self, [], 'name', 'ref' while not isinstance(item,XMLSchema) and not isinstance(item,WSDLToolsAdapter): attr = item.getAttribute(name) if not attr: ...
Returns a node trace up to the <schema> item.
def format(self, data, *args, **kwargs): ''' 将传入的Post列表数据进行格式化处理。此处传入的 ``data`` 格式即为 :meth:`.ZhihuDaily.crawl` 返回的格式,但具体内容可以不同,即此处保留了灵活度, 可以对非当日文章对象进行格式化,制作相关主题的合集书籍 :param data: 待处理的文章列表 :type data: list :return: 返回符合mobi打包需求的定制化数据结构 :rtype: dict ...
将传入的Post列表数据进行格式化处理。此处传入的 ``data`` 格式即为 :meth:`.ZhihuDaily.crawl` 返回的格式,但具体内容可以不同,即此处保留了灵活度, 可以对非当日文章对象进行格式化,制作相关主题的合集书籍 :param data: 待处理的文章列表 :type data: list :return: 返回符合mobi打包需求的定制化数据结构 :rtype: dict
def _extend_nocheck(self, iterable): """extends without checking for uniqueness This function should only be used internally by DictList when it can guarantee elements are already unique (as in when coming from self or other DictList). It will be faster because it skips these ch...
extends without checking for uniqueness This function should only be used internally by DictList when it can guarantee elements are already unique (as in when coming from self or other DictList). It will be faster because it skips these checks.
def get_all_in_collection(self, collection_paths: Union[str, Iterable[str]], load_metadata: bool = True) \ -> Sequence[EntityType]: """ Gets entities contained within the given iRODS collections. If one or more of the collection_paths does not exist, a `FileNotFound` exception will ...
Gets entities contained within the given iRODS collections. If one or more of the collection_paths does not exist, a `FileNotFound` exception will be raised. :param collection_paths: the collection(s) to get the entities from :param load_metadata: whether metadata associated to the entities sho...
def make_named_stemmer(stem=None, min_len=3): """Construct a callable object and a string sufficient to reconstruct it later (unpickling) >>> make_named_stemmer('str_lower') ('str_lower', <function str_lower at ...>) >>> make_named_stemmer('Lancaster') ('lancaster', <Stemmer object at ...>) """...
Construct a callable object and a string sufficient to reconstruct it later (unpickling) >>> make_named_stemmer('str_lower') ('str_lower', <function str_lower at ...>) >>> make_named_stemmer('Lancaster') ('lancaster', <Stemmer object at ...>)
def get_teachers_sorted(self): """Get teachers sorted by last name. This is used for the announcement request page. """ teachers = self.get_teachers() teachers = [(u.last_name, u.first_name, u.id) for u in teachers] for t in teachers: if t is None or t[0] is...
Get teachers sorted by last name. This is used for the announcement request page.
def mgmt_root(opt_bigip, opt_username, opt_password, opt_port, opt_token): '''bigip fixture''' try: from pytest import symbols except ImportError: m = ManagementRoot(opt_bigip, opt_username, opt_password, port=opt_port, token=opt_token) else: if symbols...
bigip fixture
def merge_dict(d0, d1, add_new_keys=False, append_arrays=False): """Recursively merge the contents of python dictionary d0 with the contents of another python dictionary, d1. Parameters ---------- d0 : dict The input dictionary. d1 : dict Dictionary to be merged with the input di...
Recursively merge the contents of python dictionary d0 with the contents of another python dictionary, d1. Parameters ---------- d0 : dict The input dictionary. d1 : dict Dictionary to be merged with the input dictionary. add_new_keys : str Do not skip keys that only exis...
def _example_broker_queue(quote_ctx): """ 获取经纪队列,输出 买盘卖盘的经纪ID,经纪名称,经纪档位 """ stock_code_list = ["HK.00700"] for stk_code in stock_code_list: ret_status, ret_data = quote_ctx.subscribe(stk_code, ft.SubType.BROKER) if ret_status != ft.RET_OK: print(ret_data) exi...
获取经纪队列,输出 买盘卖盘的经纪ID,经纪名称,经纪档位
def download(self, files=None, formats=None, glob_pattern=None, dry_run=None, verbose=None, silent=None, ignore_existing=None, checksum=None, destdir=None, ...
Download files from an item. :param files: (optional) Only download files matching given file names. :type formats: str :param formats: (optional) Only download files matching the given Formats. :type glob_pattern: str :param glob_pattern: (optional) On...
def pgcd(numa, numb): """ Calculate the greatest common divisor (GCD) of two numbers. :param numa: First number :type numa: number :param numb: Second number :type numb: number :rtype: number For example: >>> import pmisc, fractions >>> pmisc.pgcd(10, 15) 5...
Calculate the greatest common divisor (GCD) of two numbers. :param numa: First number :type numa: number :param numb: Second number :type numb: number :rtype: number For example: >>> import pmisc, fractions >>> pmisc.pgcd(10, 15) 5 >>> str(pmisc.pgcd(0.05, ...
def validate_path(path): """Validates the provided path :param path: path to validate (string) :raise: :InvalidUsage: If validation fails. """ if not isinstance(path, six.string_types) or not re.match('^/(?:[._a-zA-Z0-9-]/?)+[^/]$', path): raise InvalidU...
Validates the provided path :param path: path to validate (string) :raise: :InvalidUsage: If validation fails.
def _raise_decomposition_errors(uvw, antenna1, antenna2, chunks, ant_uvw, max_err): """ Raises informative exception for an invalid decomposition """ start = 0 problem_str = [] for ci, chunk in enumerate(chunks): end = start + chunk ant1 = antenna1[sta...
Raises informative exception for an invalid decomposition
def log_template_errors(logger, log_level=logging.ERROR): """ Decorator to log template errors to the specified logger. @log_template_errors(logging.getLogger('mylogger'), logging.INFO) def my_view(*args): pass Will log template errors at INFO. The default log level is ERROR. """ ...
Decorator to log template errors to the specified logger. @log_template_errors(logging.getLogger('mylogger'), logging.INFO) def my_view(*args): pass Will log template errors at INFO. The default log level is ERROR.
def print_logs(query, types=None): """ Print status logs. """ if query is None: return for run, log in query: print(("{0} @ {1} - {2} id: {3} group: {4} status: {5}".format( run.end, run.experiment_name, run.project_name, run.experiment_group, run.run_group, log.stat...
Print status logs.
def ncbi_blast(self, db="nr", megablast=True, sequence=None): """ perform an NCBI blast against the sequence of this feature """ import requests requests.defaults.max_retries = 4 assert sequence in (None, "cds", "mrna") seq = self.sequence() if sequence is None el...
perform an NCBI blast against the sequence of this feature
def process_objects(kls): """ Applies default Meta properties. """ # first add a Meta object if not exists if 'Meta' not in kls.__dict__: kls.Meta = type('Meta', (object,), {}) if 'unique_together' not in kls.Meta.__dict__: kls.Meta.unique_together...
Applies default Meta properties.
def map_query(self, variables=None, evidence=None): """ MAP Query method using belief propagation. Note: When multiple variables are passed, it returns the map_query for each of them individually. Parameters ---------- variables: list list of variabl...
MAP Query method using belief propagation. Note: When multiple variables are passed, it returns the map_query for each of them individually. Parameters ---------- variables: list list of variables for which you want to compute the probability evidence: dict ...
def clone(self, **kwargs):
    """Create a child context derived from this one.

    Any keyword arguments become per-child overrides; the child inherits
    this context's thread-safety setting.

    Returns:
        ChildContextDict: the newly created child context.
    """
    return ChildContextDict(
        parent=self,
        threadsafe=self._threadsafe,
        overrides=kwargs,
    )
Clone this context, and return the ChildContextDict
def _expand_shorthand(model_formula, variables): """Expand shorthand terms in the model formula. """ wm = 'white_matter' gsr = 'global_signal' rps = 'trans_x + trans_y + trans_z + rot_x + rot_y + rot_z' fd = 'framewise_displacement' acc = _get_matches_from_data('a_comp_cor_[0-9]+', variables...
Expand shorthand terms in the model formula.
def action_set(values):
    """Set the values to be returned after the action finishes.

    Each mapping entry is rendered as ``key=value`` and handed to the
    ``action-set`` hook tool.
    """
    cmd = ['action-set']
    cmd.extend('{}={}'.format(key, value) for key, value in values.items())
    subprocess.check_call(cmd)
Sets the values to be returned after the action finishes
def ellipse_from_second_moments(image, labels, indexes, wants_compactness = False): """Calculate measurements of ellipses equivalent to the second moments of labels image - the intensity at each point labels - for each labeled object, derive an ellipse indexes - sequence of indexes to process ...
Calculate measurements of ellipses equivalent to the second moments of labels image - the intensity at each point labels - for each labeled object, derive an ellipse indexes - sequence of indexes to process returns the following arrays: coordinates of the center of the ellipse e...
def Back(self, n = 1, dl = 0):
    """Press the backspace key ``n`` times.

    :param n: number of backspace presses (default 1)
    :param dl: delay passed to ``self.Delay`` before typing (default 0)
    """
    self.Delay(dl)
    kb = self.keyboard
    kb.tap_key(kb.backspace_key, n)
退格键n次
def _make_valid_state_name(self, state_name): """Transform the input state_name into a valid state in XMLBIF. XMLBIF states must start with a letter an only contain letters, numbers and underscores. """ s = str(state_name) s_fixed = pp.CharsNotIn(pp.alphanums + "_").setPa...
Transform the input state_name into a valid state in XMLBIF. XMLBIF states must start with a letter an only contain letters, numbers and underscores.
def delete(self, option=None): """Delete the current document in the Firestore database. Args: option (Optional[~.firestore_v1beta1.client.WriteOption]): A write option to make assertions / preconditions on the server state of the document before applying chang...
Delete the current document in the Firestore database. Args: option (Optional[~.firestore_v1beta1.client.WriteOption]): A write option to make assertions / preconditions on the server state of the document before applying changes. Returns: google.p...
def gen_report(report, sdir='./', report_name='report.html'): """ Generates report of derivation and postprocess steps in teneto.derive """ # Create report directory if not os.path.exists(sdir): os.makedirs(sdir) # Add a slash to file directory if not included to avoid DirNameFleName ...
Generates report of derivation and postprocess steps in teneto.derive
def concretize(x, solver, sym_handler): """ For now a lot of naive concretization is done when handling heap metadata to keep things manageable. This idiom showed up a lot as a result, so to reduce code repetition this function uses a callback to handle the one or two operations that varied across invoc...
For now a lot of naive concretization is done when handling heap metadata to keep things manageable. This idiom showed up a lot as a result, so to reduce code repetition this function uses a callback to handle the one or two operations that varied across invocations. :param x: the item to be concretized ...
def _find_statements(self): """Find the statements in `self.code`. Produce a sequence of line numbers that start statements. Recurses into all code objects reachable from `self.code`. """ for bp in self.child_parsers(): # Get all of the lineno information from this...
Find the statements in `self.code`. Produce a sequence of line numbers that start statements. Recurses into all code objects reachable from `self.code`.
def _sorted_copy(self, comparison, reversed=False):
    """Return a sorted copy with the colors arranged according to the
    given comparison.

    :param comparison: cmp-style comparison function passed to the sort
    :param reversed: when True, reverse the sorted order
        (NOTE: the parameter name shadows the builtin ``reversed``, but it
        is part of the public interface and kept for compatibility)
    :return: a sorted copy; this list itself is left unmodified
    """
    # Renamed the local from ``sorted`` so it no longer shadows the builtin.
    colors = self.copy()
    _list.sort(colors, comparison)
    if reversed:
        _list.reverse(colors)
    return colors
Returns a sorted copy with the colors arranged according to the given comparison.
def enqueue_command(self, command_name, args, options):
    """Append a command to this pipeline and return a promise for its result.

    Raises if the pipeline is no longer open (via ``assert_open``).
    """
    assert_open(self)
    result = Promise()
    self.commands.append((command_name, args, options, result))
    return result
Enqueue a new command into this pipeline.
def has_segment_tables(xmldoc, name = None): """ Return True if the document contains a complete set of segment tables. Returns False otherwise. If name is given and not None then the return value is True only if the document's segment tables, if present, contain a segment list by that name. """ try: names =...
Return True if the document contains a complete set of segment tables. Returns False otherwise. If name is given and not None then the return value is True only if the document's segment tables, if present, contain a segment list by that name.
def recommend(self, users=None, k=10, exclude=None, items=None, new_observation_data=None, new_user_data=None, new_item_data=None, exclude_known=True, diversity=0, random_seed=None, verbose=True): """ Recommend the ``k`` highest scored items for each...
Recommend the ``k`` highest scored items for each user. Parameters ---------- users : SArray, SFrame, or list, optional Users or observation queries for which to make recommendations. For list, SArray, and single-column inputs, this is simply a set of user I...
def get_collections_for_image(self, image_id): """Get identifier of all collections that contain a given image. Parameters ---------- image_id : string Unique identifierof image object Returns ------- List(string) List of image collection...
Get identifier of all collections that contain a given image. Parameters ---------- image_id : string Unique identifierof image object Returns ------- List(string) List of image collection identifier
def update(self): """Update the data from the thermostat. Always sets the current time.""" _LOGGER.debug("Querying the device..") time = datetime.now() value = struct.pack('BBBBBBB', PROP_INFO_QUERY, time.year % 100, time.month, time.day, ...
Update the data from the thermostat. Always sets the current time.
def run_processes(self, procdetails: List[ProcessDetails], subproc_run_timeout_sec: float = 1, stop_event_timeout_ms: int = 1000, kill_timeout_sec: float = 5) -> None: """ Run multiple child processes. Args:...
Run multiple child processes. Args: procdetails: list of :class:`ProcessDetails` objects (q.v.) subproc_run_timeout_sec: time (in seconds) to wait for each process when polling child processes to see how they're getting on (default ``1``) sto...