code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def ret(f, *args, **kwargs): """Automatically log progress on function entry and exit. Default logging value: info. The function's return value will be included in the logs. *Logging with values contained in the parameters of the decorated function* Message (args[0]) may be a string to be formatted wit...
Automatically log progress on function entry and exit. Default logging value: info. The function's return value will be included in the logs. *Logging with values contained in the parameters of the decorated function* Message (args[0]) may be a string to be formatted with parameters passed to the decor...
def get_yaml_parser_roundtrip_for_context(): """Create a yaml parser that can serialize the pypyr Context. Create yaml parser with get_yaml_parser_roundtrip, adding Context. This allows the yaml parser to serialize the pypyr Context. """ yaml_writer = get_yaml_parser_roundtrip() # Context is a...
Create a yaml parser that can serialize the pypyr Context. Create yaml parser with get_yaml_parser_roundtrip, adding Context. This allows the yaml parser to serialize the pypyr Context.
def read_from_bpch(filename, file_position, shape, dtype, endian, use_mmap=False): """ Read a chunk of data from a bpch output file. Parameters ---------- filename : str Path to file on disk containing the data file_position : int Position (bytes) where desired d...
Read a chunk of data from a bpch output file. Parameters ---------- filename : str Path to file on disk containing the data file_position : int Position (bytes) where desired data chunk begins shape : tuple of ints Resultant (n-dimensional) shape of requested data; the chun...
def consolidate_output(job, config, mutect, pindel, muse): """ Combine the contents of separate tarball outputs into one via streaming :param JobFunctionWrappingJob job: passed automatically by Toil :param Namespace config: Argparse Namespace object containing argument inputs :param str mutect: MuT...
Combine the contents of separate tarball outputs into one via streaming :param JobFunctionWrappingJob job: passed automatically by Toil :param Namespace config: Argparse Namespace object containing argument inputs :param str mutect: MuTect tarball FileStoreID :param str pindel: Pindel tarball FileStore...
def display(self, image): """ Takes a :py:mod:`PIL.Image` and dumps it to a numbered PNG file. """ assert(image.size == self.size) self._last_image = image self._count += 1 filename = self._file_template.format(self._count) image = self.preprocess(image) ...
Takes a :py:mod:`PIL.Image` and dumps it to a numbered PNG file.
def crypto_sign_open(signed, pk): """ Verifies the signature of the signed message ``signed`` using the public key ``pk`` and returns the unsigned message. :param signed: bytes :param pk: bytes :rtype: bytes """ message = ffi.new("unsigned char[]", len(signed)) message_len = ffi.new...
Verifies the signature of the signed message ``signed`` using the public key ``pk`` and returns the unsigned message. :param signed: bytes :param pk: bytes :rtype: bytes
def _prepare_load_balancers(self): """ Prepare load balancer variables """ stack = { A.NAME: self[A.NAME], A.VERSION: self[A.VERSION], } for load_balancer in self.get(R.LOAD_BALANCERS, []): svars = {A.STACK: stack} ...
Prepare load balancer variables
def get_best_ip_by_real_data_fetch(_type='stock'): """ 用特定的数据获取函数测试数据获得的时间,从而选择下载数据最快的服务器ip 默认使用特定品种1min的方式的获取 """ from QUANTAXIS.QAUtil.QADate import QA_util_today_str import time #找到前两天的有效交易日期 pre_trade_date=QA_util_get_real_date(QA_util_today_str()) pre_trade_date=QA_util_get...
用特定的数据获取函数测试数据获得的时间,从而选择下载数据最快的服务器ip 默认使用特定品种1min的方式的获取
def cublasStbmv(handle, uplo, trans, diag, n, k, A, lda, x, incx): """ Matrix-vector product for real triangular-banded matrix. """ status = _libcublas.cublasStbmv_v2(handle, _CUBLAS_FILL_MODE[uplo], _CUBLAS_OP[trans],...
Matrix-vector product for real triangular-banded matrix.
def matches_all_rules(self, target_filename): """ Returns true if the given file matches all the rules in this ruleset. :param target_filename: :return: boolean """ for rule in self.match_rules: if rule.test(target_filename) is False: return F...
Returns true if the given file matches all the rules in this ruleset. :param target_filename: :return: boolean
def get_command(self, ctx, cmd_name): """Get command for click.""" path = "%s.%s" % (__name__, cmd_name) path = path.replace("-", "_") module = importlib.import_module(path) return getattr(module, 'cli')
Get command for click.
def get(obj: JsonObj, item: str, default: JsonObjTypes=None) -> JsonObjTypes: """ Dictionary get routine """ return obj._get(item, default)
Dictionary get routine
def SURFstar_compute_scores(inst, attr, nan_entries, num_attributes, mcmap, NN_near, NN_far, headers, class_type, X, y, labels_std, data_type): """ Unique scoring procedure for SURFstar algorithm. Scoring based on nearest neighbors within defined radius, as well as 'anti-scoring' of far instances outside of r...
Unique scoring procedure for SURFstar algorithm. Scoring based on nearest neighbors within defined radius, as well as 'anti-scoring' of far instances outside of radius of current target instance
def init_group(self, group, chunk_size, compression=None, compression_opts=None): """Initializes a HDF5 group compliant with the stored data. This method creates the datasets 'items', 'labels', 'features' and 'index' and leaves them empty. :param h5py.Group group: Th...
Initializes a HDF5 group compliant with the stored data. This method creates the datasets 'items', 'labels', 'features' and 'index' and leaves them empty. :param h5py.Group group: The group to initializes. :param float chunk_size: The size of a chunk in the file (in MB). :param...
def pilatus_description_metadata(description): """Return metatata from Pilatus image description as dict. Return metadata from Pilatus pixel array detectors by Dectris, created by camserver or TVX software. >>> pilatus_description_metadata('# Pixel_size 172e-6 m x 172e-6 m') {'Pixel_size': (0.0001...
Return metatata from Pilatus image description as dict. Return metadata from Pilatus pixel array detectors by Dectris, created by camserver or TVX software. >>> pilatus_description_metadata('# Pixel_size 172e-6 m x 172e-6 m') {'Pixel_size': (0.000172, 0.000172)}
def _get_public_room(self, room_name, invitees: List[User]): """ Obtain a public, canonically named (if possible) room and invite peers """ room_name_full = f'#{room_name}:{self._server_name}' invitees_uids = [user.user_id for user in invitees] for _ in range(JOIN_RETRIES): ...
Obtain a public, canonically named (if possible) room and invite peers
def import_module(module_fqname, superclasses=None): """Imports the module module_fqname and returns a list of defined classes from that module. If superclasses is defined then the classes returned will be subclasses of the specified superclass or superclasses. If superclasses is plural it must be a tup...
Imports the module module_fqname and returns a list of defined classes from that module. If superclasses is defined then the classes returned will be subclasses of the specified superclass or superclasses. If superclasses is plural it must be a tuple of classes.
def queryset(self, request, queryset): form = self.get_form(request) """ That's the trick - we create self.form when django tries to get our queryset. This allows to create unbount and bound form in the single place. """ self.form = form start_date = form.start_...
That's the trick - we create self.form when django tries to get our queryset. This allows to create unbount and bound form in the single place.
def crop_to_extents(img1, img2, padding): """Crop the images to ensure both fit within the bounding box""" beg_coords1, end_coords1 = crop_coords(img1, padding) beg_coords2, end_coords2 = crop_coords(img2, padding) beg_coords = np.fmin(beg_coords1, beg_coords2) end_coords = np.fmax(end_coords1, en...
Crop the images to ensure both fit within the bounding box
def sign_url_path(url, secret_key, expire_in=None, digest=None): # type: (str, bytes, int, Callable) -> str """ Sign a URL (excluding the domain and scheme). :param url: URL to sign :param secret_key: Secret key :param expire_in: Expiry time. :param digest: Specify the digest function to us...
Sign a URL (excluding the domain and scheme). :param url: URL to sign :param secret_key: Secret key :param expire_in: Expiry time. :param digest: Specify the digest function to use; default is sha256 from hashlib :return: Signed URL
def prepare_actions(self, obs): """Keep a list of the past actions so they can be drawn.""" now = time.time() while self._past_actions and self._past_actions[0].deadline < now: self._past_actions.pop(0) def add_act(ability_id, color, pos, timeout=1): if ability_id: ability = self._s...
Keep a list of the past actions so they can be drawn.
def _read_n_samples(channel_file): """Calculate the number of samples based on the file size Parameters ---------- channel_file : Path path to single filename with the header Returns ------- int number of blocks (i.e. records, in which the data is cut) int numbe...
Calculate the number of samples based on the file size Parameters ---------- channel_file : Path path to single filename with the header Returns ------- int number of blocks (i.e. records, in which the data is cut) int number of samples
def pattern_to_str(pattern): """Convert regex pattern to string. If pattern is string it returns itself, if pattern is SRE_Pattern then return pattern attribute :param pattern: pattern object or string :return: str: pattern sttring """ if isinstance(pattern, str): return repr(patter...
Convert regex pattern to string. If pattern is string it returns itself, if pattern is SRE_Pattern then return pattern attribute :param pattern: pattern object or string :return: str: pattern sttring
def dense_to_deeper_block(dense_layer, weighted=True): '''deeper dense layer. ''' units = dense_layer.units weight = np.eye(units) bias = np.zeros(units) new_dense_layer = StubDense(units, units) if weighted: new_dense_layer.set_weights( (add_noise(weight, np.array([0, 1]...
deeper dense layer.
def phone_numbers(self): """ :rtype: twilio.rest.lookups.v1.phone_number.PhoneNumberList """ if self._phone_numbers is None: self._phone_numbers = PhoneNumberList(self) return self._phone_numbers
:rtype: twilio.rest.lookups.v1.phone_number.PhoneNumberList
def format(self): """Handles the actual behaviour involved with formatting. To change the behaviour, this method should be overridden. Returns -------- list A paginated output of the help command. """ values = {} title = "Description" ...
Handles the actual behaviour involved with formatting. To change the behaviour, this method should be overridden. Returns -------- list A paginated output of the help command.
def crypto_aead_chacha20poly1305_ietf_encrypt(message, aad, nonce, key): """ Encrypt the given ``message`` using the IETF ratified chacha20poly1305 construction described in RFC7539. :param message: :type message: bytes :param aad: :type aad: bytes :param nonce: :type nonce: bytes ...
Encrypt the given ``message`` using the IETF ratified chacha20poly1305 construction described in RFC7539. :param message: :type message: bytes :param aad: :type aad: bytes :param nonce: :type nonce: bytes :param key: :type key: bytes :return: authenticated ciphertext :rtype:...
def _build_tree(self): """ Build a full or a partial tree, depending on the groups/sub-groups specified. """ groups = self._groups or self.get_children_paths(self.root_path) for group in groups: node = Node(name=group, parent=self.root) self.root.children...
Build a full or a partial tree, depending on the groups/sub-groups specified.
def execute_notebook(npth, dpth, timeout=1200, kernel='python3'): """ Execute the notebook at `npth` using `dpth` as the execution directory. The execution timeout and kernel are `timeout` and `kernel` respectively. """ ep = ExecutePreprocessor(timeout=timeout, kernel_name=kernel) nb = nbf...
Execute the notebook at `npth` using `dpth` as the execution directory. The execution timeout and kernel are `timeout` and `kernel` respectively.
def clear(self): 'Clear tracks in memory - all zero' for track in self._tracks: self._tracks[track].setall(False)
Clear tracks in memory - all zero
def _finish_futures(self, responses): """Apply all the batch responses to the futures created. :type responses: list of (headers, payload) tuples. :param responses: List of headers and payloads from each response in the batch. :raises: :class:`ValueError` if n...
Apply all the batch responses to the futures created. :type responses: list of (headers, payload) tuples. :param responses: List of headers and payloads from each response in the batch. :raises: :class:`ValueError` if no requests have been deferred.
def record(self, pipeline_name, from_study): """ Returns the provenance record for a given pipeline Parameters ---------- pipeline_name : str The name of the pipeline that generated the record from_study : str The name of the study that the pipeli...
Returns the provenance record for a given pipeline Parameters ---------- pipeline_name : str The name of the pipeline that generated the record from_study : str The name of the study that the pipeline was generated from Returns ------- re...
def rpm(self, vol_per_rev): """Return the pump speed required for the reactor's stock of material given the volume of fluid output per revolution by the stock's pump. :param vol_per_rev: Volume of fluid pumped per revolution (dependent on pump and tubing) :type vol_per_rev: float ...
Return the pump speed required for the reactor's stock of material given the volume of fluid output per revolution by the stock's pump. :param vol_per_rev: Volume of fluid pumped per revolution (dependent on pump and tubing) :type vol_per_rev: float :return: Pump speed for the material...
def parse(yaml, validate=True): """ Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :type yaml: list|dict|str|file :param validate: Whether to validate the data before attempting to ...
Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :type yaml: list|dict|str|file :param validate: Whether to validate the data before attempting to parse it. :type validate: bool :retu...
def build_ellipse_model(shape, isolist, fill=0., high_harmonics=False): """ Build an elliptical model galaxy image from a list of isophotes. For each ellipse in the input isophote list the algorithm fills the output image array with the corresponding isophotal intensity. Pixels in the output array ...
Build an elliptical model galaxy image from a list of isophotes. For each ellipse in the input isophote list the algorithm fills the output image array with the corresponding isophotal intensity. Pixels in the output array are in general only partially covered by the isophote "pixel". The algorithm ta...
def run_strelka(job, tumor_bam, normal_bam, univ_options, strelka_options, split=True): """ Run the strelka subgraph on the DNA bams. Optionally split the results into per-chromosome vcfs. :param dict tumor_bam: Dict of bam and bai for tumor DNA-Seq :param dict normal_bam: Dict of bam and bai for ...
Run the strelka subgraph on the DNA bams. Optionally split the results into per-chromosome vcfs. :param dict tumor_bam: Dict of bam and bai for tumor DNA-Seq :param dict normal_bam: Dict of bam and bai for normal DNA-Seq :param dict univ_options: Dict of universal options used by almost all tools ...
def type_stmt(self, stmt, p_elem, pset): """Handle ``type`` statement. Built-in types are handled by one of the specific type callback methods defined below. """ typedef = stmt.i_typedef if typedef and not stmt.i_is_derived: # just ref uname, dic = self.uniqu...
Handle ``type`` statement. Built-in types are handled by one of the specific type callback methods defined below.
def daily_from_hourly(df): """Aggregates data (hourly to daily values) according to the characteristics of each variable (e.g., average for temperature, sum for precipitation) Args: df: dataframe including time series with one hour time steps Returns: dataframe (daily) """ df_...
Aggregates data (hourly to daily values) according to the characteristics of each variable (e.g., average for temperature, sum for precipitation) Args: df: dataframe including time series with one hour time steps Returns: dataframe (daily)
def _create_create_tracking_event(instance): """ Create a TrackingEvent and TrackedFieldModification for a CREATE event. """ event = _create_event(instance, CREATE) for field in instance._tracked_fields: if not isinstance(instance._meta.get_field(field), ManyToManyField): _create...
Create a TrackingEvent and TrackedFieldModification for a CREATE event.
def token_generator(self, texts, **kwargs): """Yields tokens from texts as `(text_idx, character)` """ for text_idx, text in enumerate(texts): if self.lower: text = text.lower() for char in text: yield text_idx, char
Yields tokens from texts as `(text_idx, character)`
def stringize( self, rnf_profile, ): """Create RNF representation of this segment. Args: rnf_profile (rnftools.rnfformat.RnfProfile): RNF profile (with widths). """ coor_width = max(rnf_profile.coor_width, len(str(self.left)), len(str(self.right))) return "({},{},{},...
Create RNF representation of this segment. Args: rnf_profile (rnftools.rnfformat.RnfProfile): RNF profile (with widths).
def _initial_guess(self, countsmat): """Generate an initial guess for \theta. """ if self.theta_ is not None: return self.theta_ if self.guess == 'log': transmat, pi = _transmat_mle_prinz(countsmat) K = np.real(scipy.linalg.logm(transmat)) / self.lag...
Generate an initial guess for \theta.
def get_stp_mst_detail_output_msti_port_transmitted_stp_type(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_stp_mst_detail = ET.Element("get_stp_mst_detail") config = get_stp_mst_detail output = ET.SubElement(get_stp_mst_detail, "output") ...
Auto Generated Code
def number_peaks(self, x, n=None): """ As in tsfresh `number_peaks <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\ feature_calculators.py#L1003>`_ Calculates the number of peaks of at least support n in the time series x. A peak o...
As in tsfresh `number_peaks <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\ feature_calculators.py#L1003>`_ Calculates the number of peaks of at least support n in the time series x. A peak of support n is defined \ as a subsequence of x ...
def file(cls, path, encoding=None, parser=None): """Set a file as a source. File are parsed as literal python dicts by default, this behaviour can be configured. Args: path: The path to the file to be parsed encoding: The encoding of the file. De...
Set a file as a source. File are parsed as literal python dicts by default, this behaviour can be configured. Args: path: The path to the file to be parsed encoding: The encoding of the file. Defaults to 'raw'. Available built-in values: 'ini', 'json', '...
def make_sshable(c): """ Set up passwordless SSH keypair & authorized_hosts access to localhost. """ user = c.travis.sudo.user home = "~{0}".format(user) # Run sudo() as the new sudo user; means less chown'ing, etc. c.config.sudo.user = user ssh_dir = "{0}/.ssh".format(home) # TODO: ...
Set up passwordless SSH keypair & authorized_hosts access to localhost.
def flush(self, objects=None, batch_size=None, **kwargs): ''' flush objects stored in self.container or those passed in''' batch_size = batch_size or self.config.get('batch_size') # if we're flushing these from self.store, we'll want to # pop them later. if objects: f...
flush objects stored in self.container or those passed in
def registration_options(self): """Gathers values for common attributes between the registration model and this instance. """ registration_options = {} rs = self.registration_model() for k, v in self.__dict__.items(): if k not in DEFAULT_BASE_FIELDS + ['_state...
Gathers values for common attributes between the registration model and this instance.
def point_dist(pt1, pt2): """ Calculate the Euclidean distance between two n-D points. |pt1 - pt2| .. todo:: Complete point_dist docstring """ # Imports from scipy import linalg as spla dist = spla.norm(point_displ(pt1, pt2)) return dist
Calculate the Euclidean distance between two n-D points. |pt1 - pt2| .. todo:: Complete point_dist docstring
def teardown_logical_port_connectivity(self, context, port_db, hosting_device_id): """Removes connectivity for a logical port. Unplugs the corresponding data interface from the VM. """ if port_db is None or port_db.get('id') is None: ...
Removes connectivity for a logical port. Unplugs the corresponding data interface from the VM.
def _gcs_get_key_names(bucket, pattern): """ Get names of all Google Cloud Storage keys in a specified bucket that match a pattern. """ return [obj.metadata.name for obj in _gcs_get_keys(bucket, pattern)]
Get names of all Google Cloud Storage keys in a specified bucket that match a pattern.
def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, # return it to the pool so it can be reused. if self._con is not None: self._pool.cache(self._con) self._con = None
Close the pooled connection.
def getratio(self, code) : """ Get ratio of code and pattern matched """ if len(code) == 0 : return 0 code_replaced = self.prog.sub('', code) return (len(code) - len(code_replaced)) / len(code)
Get ratio of code and pattern matched
def convert(cls, **kwargsql): """ :param dict kwargsql: Kwargsql expression to convert :return: filter to be used in :py:method:`pymongo.collection.find` :rtype: dict """ filters = [] for k, v in kwargsql.items(): terms = k.split('...
:param dict kwargsql: Kwargsql expression to convert :return: filter to be used in :py:method:`pymongo.collection.find` :rtype: dict
def load(cls, config: Optional[Config] = None): """Load a DataFlowKernel. Args: - config (Config) : Configuration to load. This config will be passed to a new DataFlowKernel instantiation which will be set as the active DataFlowKernel. Returns: - DataFlowKe...
Load a DataFlowKernel. Args: - config (Config) : Configuration to load. This config will be passed to a new DataFlowKernel instantiation which will be set as the active DataFlowKernel. Returns: - DataFlowKernel : The loaded DataFlowKernel object.
def switch_training(self, flag): """ Switch training mode. :param flag: switch on training mode when flag is True. """ if self._is_training == flag: return self._is_training = flag if flag: self._training_flag.set_value(1) else: sel...
Switch training mode. :param flag: switch on training mode when flag is True.
def export_coreml(self, filename): """ Save the model in Core ML format. See Also -------- save Examples -------- >>> model.export_coreml('myModel.mlmodel') """ import coremltools # First define three internal helper functions ...
Save the model in Core ML format. See Also -------- save Examples -------- >>> model.export_coreml('myModel.mlmodel')
def _check_unpack_options(extensions, function, extra_args): """Checks what gets registered as an unpacker.""" # first make sure no other unpacker is registered for this extension existing_extensions = {} for name, info in _UNPACK_FORMATS.items(): for ext in info[0]: existing_extensi...
Checks what gets registered as an unpacker.
def get_sites(self, filter_func=lambda x: True): """ Returns a list of TSquareSite objects that represent the sites available to a user. @param filter_func - A function taking in a Site object as a parameter that returns a True or False, depending on whether ...
Returns a list of TSquareSite objects that represent the sites available to a user. @param filter_func - A function taking in a Site object as a parameter that returns a True or False, depending on whether or not that site should be returned by t...
def _handle_struct_ref(self, node, scope, ctxt, stream): """TODO: Docstring for _handle_struct_ref. :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog("handling struct ref") # name # field struct = s...
TODO: Docstring for _handle_struct_ref. :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO
def delete_api_model(restApiId, modelName, region=None, key=None, keyid=None, profile=None): ''' Delete a model identified by name in a given API CLI Example: .. code-block:: bash salt myminion boto_apigateway.delete_api_model restApiId modelName ''' try: conn = _get_conn(reg...
Delete a model identified by name in a given API CLI Example: .. code-block:: bash salt myminion boto_apigateway.delete_api_model restApiId modelName
def quote_value(value): """ convert values to mysql code for the same mostly delegate directly to the mysql lib, but some exceptions exist """ try: if value == None: return SQL_NULL elif isinstance(value, SQL): return quote_sql(value.template, value.param) ...
convert values to mysql code for the same mostly delegate directly to the mysql lib, but some exceptions exist
def _watch(inotify, watchers, watch_flags, s3_uploader): """As soon as a user is done with a file under `/opt/ml/output/intermediate` we would get notified by using inotify. We would copy this file under `/opt/ml/output/intermediate/.tmp.sagemaker_s3_sync` folder preserving the same folder structure to ...
As soon as a user is done with a file under `/opt/ml/output/intermediate` we would get notified by using inotify. We would copy this file under `/opt/ml/output/intermediate/.tmp.sagemaker_s3_sync` folder preserving the same folder structure to prevent it from being further modified. As we copy the file ...
def ssh_invite(ctx, code_length, user, **kwargs): """ Add a public-key to a ~/.ssh/authorized_keys file """ for name, value in kwargs.items(): setattr(ctx.obj, name, value) from . import cmd_ssh ctx.obj.code_length = code_length ctx.obj.ssh_user = user return go(cmd_ssh.invite, c...
Add a public-key to a ~/.ssh/authorized_keys file
def identity_to_string(identity_dict): """Dump Identity dictionary into its string representation.""" result = [] if identity_dict.get('proto'): result.append(identity_dict['proto'] + '://') if identity_dict.get('user'): result.append(identity_dict['user'] + '@') result.append(identi...
Dump Identity dictionary into its string representation.
def add_item(self, key, value, cache_name=None): """Add an item into the given cache. This is a commodity option (mainly useful for testing) allowing you to store an item in a uWSGI cache during startup. :param str|unicode key: :param value: :param str|unicode cache_n...
Add an item into the given cache. This is a commodity option (mainly useful for testing) allowing you to store an item in a uWSGI cache during startup. :param str|unicode key: :param value: :param str|unicode cache_name: If not set, default will be used.
def contract_multiplier(self): """ [float] 合约乘数,例如沪深300股指期货的乘数为300.0(期货专用) """ try: return self.__dict__["contract_multiplier"] except (KeyError, ValueError): raise AttributeError( "Instrument(order_book_id={}) has no attribute 'contract_mu...
[float] 合约乘数,例如沪深300股指期货的乘数为300.0(期货专用)
def get_params(self, deep=False): """Get parameters.""" params = super(XGBModel, self).get_params(deep=deep) if isinstance(self.kwargs, dict): # if kwargs is a dict, update params accordingly params.update(self.kwargs) if params['missing'] is np.nan: params['miss...
Get parameters.
def create(self, name, script, params=None): ''' /v1/startupscript/create POST - account Create a startup script Link: https://www.vultr.com/api/#startupscript_create ''' params = update_params(params, { 'name': name, 'script': script }) ...
/v1/startupscript/create POST - account Create a startup script Link: https://www.vultr.com/api/#startupscript_create
def thaw_from_args(parser): """Adds command line options for things related to inline thawing of icefiles""" parser.add_argument('--thaw-from', dest='thaw_from', help='Thaw an ICE file containing secrets') parser.add_argument('--gpg-password-path', ...
Adds command line options for things related to inline thawing of icefiles
def get_language(): """Create or retrieve the parse tree for defining a sensor graph.""" global sensor_graph, statement if sensor_graph is not None: return sensor_graph _create_primitives() _create_simple_statements() _create_block_bnf() sensor_graph = ZeroOrMore(statement) + Str...
Create or retrieve the parse tree for defining a sensor graph.
def invertible_1x1_conv(name, x, reverse=False): """1X1 convolution on x. The 1X1 convolution is parametrized as P*L*(U + sign(s)*exp(log(s))) where 1. P is a permutation matrix. 2. L is a lower triangular matrix with diagonal entries unity. 3. U is a upper triangular matrix where the diagonal entries zero. ...
1X1 convolution on x. The 1X1 convolution is parametrized as P*L*(U + sign(s)*exp(log(s))) where 1. P is a permutation matrix. 2. L is a lower triangular matrix with diagonal entries unity. 3. U is a upper triangular matrix where the diagonal entries zero. 4. s is a vector. sign(s) and P are fixed and the...
def _handle_start_center(self, attrs): """ Handle opening center element :param attrs: Attributes of the element :type attrs: Dict """ center_lat = attrs.get("lat") center_lon = attrs.get("lon") if center_lat is None or center_lon is None: rai...
Handle opening center element :param attrs: Attributes of the element :type attrs: Dict
def consolidate_tarballs_job(job, fname_to_id): """ Combine the contents of separate tarballs into one. Subdirs within the tarball will be named the keys in **fname_to_id :param JobFunctionWrappingJob job: passed automatically by Toil :param dict[str,str] fname_to_id: Dictionary of the form: file-n...
Combine the contents of separate tarballs into one. Subdirs within the tarball will be named the keys in **fname_to_id :param JobFunctionWrappingJob job: passed automatically by Toil :param dict[str,str] fname_to_id: Dictionary of the form: file-name-prefix=FileStoreID :return: The file store ID of the...
def colless(self, normalize='leaves'): '''Compute the Colless balance index of this ``Tree``. If the tree has polytomies, they will be randomly resolved Args: ``normalize`` (``str``): How to normalize the Colless index (if at all) * ``None`` to not normalize * ``"l...
Compute the Colless balance index of this ``Tree``. If the tree has polytomies, they will be randomly resolved Args: ``normalize`` (``str``): How to normalize the Colless index (if at all) * ``None`` to not normalize * ``"leaves"`` to normalize by the number of leaves ...
def __check_success(resp): """ Check a JSON server response to see if it was successful :type resp: Dictionary (parsed JSON from response) :param resp: the response string :rtype: String :returns: the success message, if it exists :raises: APIError if the success messa...
Check a JSON server response to see if it was successful :type resp: Dictionary (parsed JSON from response) :param resp: the response string :rtype: String :returns: the success message, if it exists :raises: APIError if the success message is not present
def vm_netstats(vm_=None, **kwargs): ''' Return combined network counters used by the vms on this hyper in a list of dicts: :param vm_: domain name :param connection: libvirt connection URI, overriding defaults .. versionadded:: 2019.2.0 :param username: username to connect with, overr...
Return combined network counters used by the vms on this hyper in a list of dicts: :param vm_: domain name :param connection: libvirt connection URI, overriding defaults .. versionadded:: 2019.2.0 :param username: username to connect with, overriding defaults .. versionadded:: 2019.2....
def reMutualReceptions(self, idA, idB):
    """Return the ruler/exaltation mutual receptions between two objects.

    Filters the full mutual-reception list down to pairs where both
    dignities are either 'ruler' or 'exalt'.
    """
    allowed = ('ruler', 'exalt')
    receptions = self.mutualReceptions(idA, idB)
    strong = []
    for dignity_a, dignity_b in receptions:
        # Keep only pairs where both sides are a major dignity.
        if dignity_a in allowed and dignity_b in allowed:
            strong.append((dignity_a, dignity_b))
    return strong
Returns ruler and exaltation mutual receptions.
def getTargetNamespace(self): """return targetNamespace """ parent = self targetNamespace = 'targetNamespace' tns = self.attributes.get(targetNamespace) while not tns and parent and parent._parent is not None: parent = parent._parent() tns = parent...
return targetNamespace
def obspy_3d_plot(inventory, catalog, size=(10.5, 7.5), **kwargs): """ Plot obspy Inventory and obspy Catalog classes in three dimensions. :type inventory: obspy.core.inventory.inventory.Inventory :param inventory: Obspy inventory class containing station metadata :type catalog: obspy.core.event.ca...
Plot obspy Inventory and obspy Catalog classes in three dimensions. :type inventory: obspy.core.inventory.inventory.Inventory :param inventory: Obspy inventory class containing station metadata :type catalog: obspy.core.event.catalog.Catalog :param catalog: Obspy catalog class containing event metadata...
def save_svg(string, parent=None): """ Prompts the user to save an SVG document to disk. Parameters: ----------- string : basestring A Python string containing a SVG document. parent : QWidget, optional The parent to use for the file dialog. Returns: -------- The name ...
Prompts the user to save an SVG document to disk. Parameters: ----------- string : basestring A Python string containing a SVG document. parent : QWidget, optional The parent to use for the file dialog. Returns: -------- The name of the file to which the document was saved...
def dict_to_numpy_dict(obj_dict):
    """Convert a dictionary of lists into a dictionary of numpy arrays.

    ``None`` values are passed through unchanged rather than converted.
    """
    converted = {}
    for key, value in obj_dict.items():
        converted[key] = None if value is None else np.asarray(value)
    return converted
Convert a dictionary of lists into a dictionary of numpy arrays
def all_files_exist(file_list): """Check if all files exist. :param file_list: the names of files to check. :type file_list: list :returns: ``True`` if all files exist, ``False`` otherwise. """ all_exist = True for filename in file_list: all_exist = all_exist and os.path.isfile(f...
Check if all files exist. :param file_list: the names of files to check. :type file_list: list :returns: ``True`` if all files exist, ``False`` otherwise.
def format_usage_masks(self, V_usage_mask_in, J_usage_mask_in, print_warnings = True): """Format raw usage masks into lists of indices. Usage masks allows the Pgen computation to be conditioned on the V and J gene/allele identities. The inputted masks are lists of strings, or a si...
Format raw usage masks into lists of indices. Usage masks allows the Pgen computation to be conditioned on the V and J gene/allele identities. The inputted masks are lists of strings, or a single string, of the names of the genes or alleles to be conditioned on. The default mask ...
def simplex_find_cycle(self): ''' API: simplex_find_cycle(self) Description: Returns a cycle (list of nodes) if the graph has one, returns None otherwise. Uses DFS. During DFS checks existence of arcs to lower depth regions. Note that direction of ...
API: simplex_find_cycle(self) Description: Returns a cycle (list of nodes) if the graph has one, returns None otherwise. Uses DFS. During DFS checks existence of arcs to lower depth regions. Note that direction of the arcs are not important. Return: ...
def p_ports(self, p):
    'ports : ports COMMA portname'
    # NOTE: the string above is the PLY grammar rule for this action,
    # not documentation — it must stay exactly as-is.
    # Width is unknown at this point in the parse, so it is left unset.
    new_port = Port(name=p[3], width=None, type=None, lineno=p.lineno(1))
    # Append the new port to the tuple accumulated so far.
    p[0] = p[1] + (new_port,)
    p.set_lineno(0, p.lineno(1))
ports : ports COMMA portname
def get_geostationary_angle_extent(geos_area): """Get the max earth (vs space) viewing angles in x and y.""" # TODO: take into account sweep_axis_angle parameter # get some projection parameters req = geos_area.proj_dict['a'] / 1000 rp = geos_area.proj_dict['b'] / 1000 h = geos_area.proj_dict['...
Get the max earth (vs space) viewing angles in x and y.
def main(argv): """This function sets up a command-line option parser and then calls match_and_print to do all of the real work. """ import argparse description = 'Uses Open Tree of Life web services to try to find a taxon ID for each name supplied. ' \ 'Using a --context-name=NAME...
This function sets up a command-line option parser and then calls match_and_print to do all of the real work.
def pdf_row_limiter(rows, limits=None, **kwargs): """ Limit row passing a value. In this case we dont implementate a best effort algorithm because the posibilities are infite with a data text structure from a pdf. """ limits = limits or [None, None] upper_limit = limits[0] if limits else No...
Limit rows by a given value. We do not implement a best-effort algorithm here because the possibilities are infinite with a text data structure extracted from a PDF.
def run(self): """ Perform phantomas run """ self._logger.info("running for <{url}>".format(url=self._url)) args = format_args(self._options) self._logger.debug("command: `{cmd}` / args: {args}". format(cmd=self._cmd, args=args)) # run the process ...
Perform phantomas run
def verify_invoice_params(self, price, currency): """ Deprecated, will be made private in 2.4 """ if re.match("^[A-Z]{3,3}$", currency) is None: raise BitPayArgumentError("Currency is invalid.") try: float(price) except: raise BitPayArgumentError("Price must be formatted as a ...
Deprecated, will be made private in 2.4
def register_opts(conf):
    """Register this application's option definitions on *conf*.

    :param conf: the configuration object (oslo.config-style API with
        ``register_cli_opts`` / ``register_opts``) to register options on.
    """
    # CLI options are exposed on the command line as well as the config file.
    conf.register_cli_opts(CLI_OPTS)
    # Plain option groups registered in the default section.
    for option_set in (EPISODE_OPTS, FORMAT_OPTS):
        conf.register_opts(option_set)
    # Cache options live in their own [cache] section.
    conf.register_opts(CACHE_OPTS, 'cache')
Configure options within configuration library.
def run_star(job, fastqs, univ_options, star_options): """ This module uses STAR to align the RNA fastqs to the reference ARGUMENTS 1. fastqs: REFER RETURN VALUE of run_cutadapt() 2. univ_options: Dict of universal arguments used by almost all tools univ_options +- 'dockerhub...
This module uses STAR to align the RNA fastqs to the reference ARGUMENTS 1. fastqs: REFER RETURN VALUE of run_cutadapt() 2. univ_options: Dict of universal arguments used by almost all tools univ_options +- 'dockerhub': <dockerhub to use> 3. star_options: Dict of parameters speci...
def log_message(self, msg, *args):
    """Hook to log a message at INFO level via the instance logger.

    If positional *args* are given, *msg* is treated as a %-format
    string and interpolated before logging.
    """
    formatted = msg % args if args else msg
    self.logger.info(formatted)
Hook to log a message.
def relabel(self, qubits: Qubits) -> 'Channel':
    """Return a copy of this channel acting on the given qubits.

    The channel itself is not modified; a (shallow) copy is made and its
    underlying vector is relabeled onto *qubits*.
    """
    relabeled = copy(self)
    relabeled.vec = relabeled.vec.relabel(qubits)
    return relabeled
Return a copy of this channel with new qubits
def get(self, name):
    """Return the workspace info for *name*, or None if it does not exist."""
    workspaces = self.list()
    if name in workspaces:
        return workspaces[name]
    return None
Get workspace info by name. Return None if the workspace doesn't exist.
def update(self): """Replace baseline representations previously registered for update.""" for linenum in reversed(sorted(self.updates)): self.replace_baseline_repr(linenum, self.updates[linenum]) if not self.TEST_MODE: path = '{}.update{}'.format(*os.path.splitext(self....
Replace baseline representations previously registered for update.
def set_group_anonymous(self, *, group_id, enable=True): """ 群组匿名 ------------ :param int group_id: 群号 :param bool enable: 是否允许匿名聊天 :return: None :rtype: None """ return super().__getattr__('set_group_anonymous') \ (group_id=group_id,...
群组匿名 ------------ :param int group_id: 群号 :param bool enable: 是否允许匿名聊天 :return: None :rtype: None
def get_relationship_info(tree, media, image_sizes): """ There is a separate file holds the targets to links as well as the targets for images. Return a dictionary based on the relationship id and the target. """ if tree is None: return {} result = {} # Loop through each relation...
There is a separate file holds the targets to links as well as the targets for images. Return a dictionary based on the relationship id and the target.
def _parse_name(self, name): """Internal method to parse a `string` name into constituent `ifo, `name` and `version` components. Parameters ---------- name : `str`, `None` the full name of a `DataQualityFlag` to parse, e.g. ``'H1:DMT-SCIENCE:1'``, or `Non...
Internal method to parse a `string` name into constituent `ifo, `name` and `version` components. Parameters ---------- name : `str`, `None` the full name of a `DataQualityFlag` to parse, e.g. ``'H1:DMT-SCIENCE:1'``, or `None` to set all components to ...