Dataset schema: Unnamed: 0 (int64, values 0 to 389k) · code (string, lengths 26 to 79.6k) · docstring (string, lengths 1 to 46.9k)
100
def data(self):
    try:
        data = self._data
    except AttributeError:
        data = self._data = json.loads(self.json)
    return data
Returns self.json loaded as a python object.
101
def dbmin10years(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            # the original error-message literal was stripped in extraction;
            # this wording is a reconstruction of its gist
            raise ValueError(
                'value {} is not a valid float for field `dbmin10years`'.format(value))
    self._dbmin10years = value
Corresponds to IDD Field `dbmin10years` 10-year return period values for minimum extreme dry-bulb temperature Args: value (float): value for IDD Field `dbmin10years` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
102
def build_java_worker_command(
        java_worker_options,
        redis_address,
        plasma_store_name,
        raylet_name,
        redis_password,
        temp_dir,
):
    assert java_worker_options is not None
    # the options are appended below, so the base command carries no placeholder
    command = "java "
    if redis_address is not None:
        command += "-Dray.redis.address={} ".format(redis_address)
    if plasma_store_name is not None:
        command += "-Dray.object-store.socket-name={} ".format(plasma_store_name)
    if raylet_name is not None:
        command += "-Dray.raylet.socket-name={} ".format(raylet_name)
    if redis_password is not None:
        command += "-Dray.redis.password={} ".format(redis_password)
    command += "-Dray.home={} ".format(RAY_HOME)
    command += "-Dray.log-dir={} ".format(os.path.join(temp_dir, "sockets"))
    if java_worker_options:
        command += java_worker_options + " "
    command += "org.ray.runtime.runner.worker.DefaultWorker"
    return command
This method assembles the command used to start a Java worker. Args: java_worker_options (str): The command options for the Java worker. redis_address (str): Redis address of GCS. plasma_store_name (str): The name of the plasma store socket to connect to. raylet_name (str): The name of the raylet socket to create. redis_password (str): The password used to connect to Redis. temp_dir (str): The path of the temporary directory Ray will use. Returns: The command string for starting the Java worker.
103
def clean_course(self):
    course_id = self.cleaned_data[self.Fields.COURSE].strip()
    if not course_id:
        return None
    try:
        client = EnrollmentApiClient()
        return client.get_course_details(course_id)
    except (HttpClientError, HttpServerError):
        raise ValidationError(ValidationMessages.INVALID_COURSE_ID.format(course_id=course_id))
Verify course ID and retrieve course details.
104
def decrease_frequency(self, frequency=None):
    if frequency is None:
        javabridge.call(self.jobject, "decreaseFrequency", "()V")
    else:
        javabridge.call(self.jobject, "decreaseFrequency", "(I)V", frequency)
Decreases the frequency. :param frequency: the frequency to decrease by, 1 if None :type frequency: int
105
def _process_response_xml(self, response_xml): result = {} xml = ElementTree.fromstring(response_xml) if xml.tag == : logger.error( u + % response_xml) errors_message = u for error in xml.findall(): error_code = error.find().text error_message = error.find().text errors_message += u % (error_code, error_message) raise PagSeguroPaymentException(errors_message) if xml.tag == : result[] = xml.find().text try: xml_date = xml.find().text result[] = dateutil.parser.parse(xml_date) except: logger.exception(u) result[] = None else: raise PagSeguroPaymentException( u) return result
Processa o xml de resposta e caso não existam erros retorna um dicionario com o codigo e data. :return: dictionary
106
def purge(self, queue, nowait=True, ticket=None, cb=None):
    nowait = nowait and self.allow_nowait() and not cb
    args = Writer()
    args.write_short(ticket or self.default_ticket).\
        write_shortstr(queue).\
        write_bit(nowait)
    self.send_frame(MethodFrame(self.channel_id, 50, 30, args))
    if not nowait:
        self._purge_cb.append(cb)
        return self.channel.add_synchronous_cb(self._recv_purge_ok)
Purge all messages in a queue.
107
def parallel_map(task, values, task_args=tuple(), task_kwargs={}, num_processes=CPU_COUNT): if len(values) == 1: return [task(values[0], *task_args, **task_kwargs)] Publisher().publish("terra.parallel.start", len(values)) nfinished = [0] def _callback(_): nfinished[0] += 1 Publisher().publish("terra.parallel.done", nfinished[0]) if platform.system() != and num_processes > 1 \ and os.getenv() == : os.environ[] = try: pool = Pool(processes=num_processes) async_res = [pool.apply_async(task, (value,) + task_args, task_kwargs, _callback) for value in values] while not all([item.ready() for item in async_res]): for item in async_res: item.wait(timeout=0.1) pool.terminate() pool.join() except KeyboardInterrupt: pool.terminate() pool.join() Publisher().publish("terra.parallel.finish") raise QiskitError() Publisher().publish("terra.parallel.finish") os.environ[] = return [ar.get() for ar in async_res] results = [] for _, value in enumerate(values): result = task(value, *task_args, **task_kwargs) results.append(result) _callback(0) Publisher().publish("terra.parallel.finish") return results
Parallel execution of a mapping of `values` to the function `task`. This is functionally equivalent to:: result = [task(value, *task_args, **task_kwargs) for value in values] On Windows this function defaults to a serial implementation to avoid the overhead from spawning processes in Windows. Args: task (func): Function that is to be called for each value in ``values``. values (array_like): List or array of values for which the ``task`` function is to be evaluated. task_args (list): Optional additional arguments to the ``task`` function. task_kwargs (dict): Optional additional keyword arguments to the ``task`` function. num_processes (int): Number of processes to spawn. Returns: result: The result list contains the value of ``task(value, *task_args, **task_kwargs)`` for each value in ``values``. Raises: QiskitError: If the user interrupts via keyboard. Events: terra.parallel.start: The collection of parallel tasks is about to start. terra.parallel.update: One of the parallel tasks has finished. terra.parallel.finish: All the parallel tasks have finished.
108
def clone(self): self.config[] = self.config[].lower() self.config[] = int(self.config[] * 1024) print("Cloning %s to new host %s with %sMB RAM..." % ( self.config[], self.config[], self.config[] )) ip_settings = list() for key, ip_string in enumerate(self.config[]): ip = IPAddress(ip_string) for network in self.config[]: if ip in IPNetwork(network): self.config[][network][] = ip ipnet = IPNetwork(network) self.config[][network][] = str( ipnet.netmask ) ip_settings.append(self.config[][network]) "settings for this network in config.yml." % ip_string) sys.exit(1) self.get_obj([vim.Network], ip_settings[0][]) datacenter = self.get_obj([vim.Datacenter], ip_settings[0][] ) if self.config[]: destfolder = self.content.searchIndex.FindByInventoryPath( self.config[] ) else: destfolder = datacenter.vmFolder cluster = self.get_obj([vim.ClusterComputeResource], ip_settings[0][] ) resource_pool_str = self.config[] if resource_pool_str == \ and ( in ip_settings[key]): resource_pool_str = ip_settings[key][] resource_pool = self.get_resource_pool(cluster, resource_pool_str) host_system = self.config[] if host_system != "": host_system = self.get_obj([vim.HostSystem], self.config[] ) if self.debug: self.print_debug( "Destination cluster", cluster ) self.print_debug( "Resource pool", resource_pool ) if resource_pool is None: resource_pool = cluster.resourcePool datastore = None if self.config[]: datastore = self.get_obj( [vim.Datastore], self.config[]) elif in ip_settings[0]: datastore = self.get_obj( [vim.Datastore], ip_settings[0][]) if datastore is None: print("Error: Unable to find Datastore " % ip_settings[0][]) sys.exit(1) if self.config[]: template_vm = self.get_vm_failfast( self.config[], False, , path=self.config[] ) else: template_vm = self.get_vm_failfast( self.config[], False, ) relospec = vim.vm.RelocateSpec() relospec.datastore = datastore if host_system: relospec.host = host_system if resource_pool: relospec.pool = resource_pool devices = [] adaptermaps = [] try: for device in template_vm.config.hardware.device: if hasattr(device, ): pass for key, ip in enumerate(ip_settings): nic = vim.vm.device.VirtualDeviceSpec() nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.add nic.device = vim.vm.device.VirtualVmxnet3() nic.device.wakeOnLanEnabled = True nic.device.addressType = nic.device.key = 4000 nic.device.deviceInfo = vim.Description() nic.device.deviceInfo.label = % (key + 1) if in ip_settings[key]: dvpg = ip_settings[key][] nic.device.deviceInfo.summary = dvpg pg_obj = self.get_obj([vim.dvs.DistributedVirtualPortgroup], dvpg) dvs_port_connection = vim.dvs.PortConnection() dvs_port_connection.portgroupKey = pg_obj.key dvs_port_connection.switchUuid = ( pg_obj.config.distributedVirtualSwitch.uuid ) e_nic = vim.vm.device.VirtualEthernetCard nic.device.backing = ( e_nic.DistributedVirtualPortBackingInfo() ) nic.device.backing.port = dvs_port_connection else: nic.device.deviceInfo.summary = ip_settings[key][] nic.device.backing = ( vim.vm.device.VirtualEthernetCard.NetworkBackingInfo() ) nic.device.backing.network = ( self.get_obj([vim.Network], ip_settings[key][]) ) nic.device.backing.deviceName = ip_settings[key][] nic.device.backing.useAutoDetect = False nic.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo() nic.device.connectable.startConnected = True nic.device.connectable.allowGuestControl = True devices.append(nic) if in ip_settings[key]: custom_spec_name = ip_settings[key][] customspec = ( self.get_customization_settings(custom_spec_name) ) guest_map = 
customspec.nicSettingMap[0] else: customspec = vim.vm.customization.Specification() guest_map = vim.vm.customization.AdapterMapping() guest_map.adapter = vim.vm.customization.IPSettings() guest_map.adapter.ip = vim.vm.customization.FixedIp() guest_map.adapter.ip.ipAddress = str(ip_settings[key][]) if in ip_settings[key]: guest_map.adapter.subnetMask = ( str(ip_settings[key][]) ) if in ip_settings[key]: guest_map.adapter.gateway = ip_settings[key][] if self.config[]: guest_map.adapter.dnsDomain = self.config[] adaptermaps.append(guest_map) if in self.config: globalip = vim.vm.customization.GlobalIPSettings() globalip.dnsServerList = self.config[] globalip.dnsSuffixList = self.config[] customspec.globalIPSettings = globalip ident = vim.vm.customization.LinuxPrep() ident.domain = self.config[] ident.hostName = vim.vm.customization.FixedName() ident.hostName.name = self.config[] customspec.nicSettingMap = adaptermaps customspec.identity = ident vmconf = vim.vm.ConfigSpec() vmconf.numCPUs = self.config[] vmconf.memoryMB = self.config[] vmconf.cpuHotAddEnabled = True vmconf.memoryHotAddEnabled = True vmconf.deviceChange = devices clonespec = vim.vm.CloneSpec() clonespec.location = relospec clonespec.config = vmconf clonespec.customization = customspec clonespec.powerOn = True clonespec.template = False self.addDisks(template_vm, clonespec) if self.debug: self.print_debug("CloneSpec", clonespec) tasks = [template_vm.Clone(folder=destfolder, name=self.config[], spec=clonespec )] result = self.WaitForTasks(tasks) if self.config[]: try: os.environ[] = self.config[] print("Running --post-clone-cmd %s" % self.config[]) os.system(self.config[]) except Exception as e: print("Error running post-clone command. Exception: %s" % e) pass if self.config[]: self.send_email()
Command Section: clone Clone a VM from a template
109
def boxes_intersect(box1, box2):
    xmin1, xmax1, ymin1, ymax1 = box1
    xmin2, xmax2, ymin2, ymax2 = box2
    if interval_intersection_width(xmin1, xmax1, xmin2, xmax2) and \
            interval_intersection_width(ymin1, ymax1, ymin2, ymax2):
        return True
    else:
        return False
Determines if two rectangles, each input as a tuple (xmin, xmax, ymin, ymax), intersect.
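A minimal usage sketch; `interval_intersection_width` is not shown in this row, so a plausible helper is assumed here from its name (overlap width of two 1-D intervals):

    # hypothetical helper, assumed from the name
    def interval_intersection_width(a1, a2, b1, b2):
        return max(0, min(a2, b2) - max(a1, b1))

    boxes_intersect((0, 2, 0, 2), (1, 3, 1, 3))  # True: overlap in both x and y
    boxes_intersect((0, 1, 0, 1), (2, 3, 2, 3))  # False: disjoint in both axes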
110
def difference(self, other, sort=None):
    self._validate_sort_keyword(sort)
    self._assert_can_do_setop(other)
    other, result_names = self._convert_can_do_setop(other)
    if len(other) == 0:
        return self
    if self.equals(other):
        return MultiIndex(levels=self.levels,
                          codes=[[]] * self.nlevels,
                          names=result_names, verify_integrity=False)
    this = self._get_unique_index()
    indexer = this.get_indexer(other)
    indexer = indexer.take((indexer != -1).nonzero()[0])
    label_diff = np.setdiff1d(np.arange(this.size), indexer,
                              assume_unique=True)
    difference = this.values.take(label_diff)
    if sort is None:
        difference = sorted(difference)
    if len(difference) == 0:
        return MultiIndex(levels=[[]] * self.nlevels,
                          codes=[[]] * self.nlevels,
                          names=result_names, verify_integrity=False)
    else:
        return MultiIndex.from_tuples(difference, sortorder=0,
                                      names=result_names)
Compute set difference of two MultiIndex objects Parameters ---------- other : MultiIndex sort : False or None, default None Sort the resulting MultiIndex if possible .. versionadded:: 0.24.0 .. versionchanged:: 0.24.1 Changed the default value from ``True`` to ``None`` (without change in behaviour). Returns ------- diff : MultiIndex
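A short usage sketch of the public behaviour this implements (standard pandas API):

    import pandas as pd

    left = pd.MultiIndex.from_tuples([('a', 1), ('b', 2), ('c', 3)])
    right = pd.MultiIndex.from_tuples([('b', 2)])
    left.difference(right)  # MultiIndex with ('a', 1) and ('c', 3)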
111
def enable_external_loaders(obj):
    for name, loader in ct.EXTERNAL_LOADERS.items():
        enabled = getattr(
            obj, "{}_ENABLED_FOR_DYNACONF".format(name.upper()), False
        )
        if (
            enabled
            and enabled not in false_values
            and loader not in obj.LOADERS_FOR_DYNACONF
        ):
            obj.logger.debug("loaders: Enabling %s", loader)
            obj.LOADERS_FOR_DYNACONF.insert(0, loader)
Enable external service loaders like `VAULT_` and `REDIS_`; looks for env variables like `REDIS_ENABLED_FOR_DYNACONF`.
112
def fit(self):
    self._mcmcfit = self.mcmcsetup.run()
    self._mcmcfit.burnin(self.burnin)
    dmin = min(self._mcmcfit.depth_segments)
    dmax = max(self._mcmcfit.depth_segments)
    self._thick = (dmax - dmin) / len(self.mcmcfit.depth_segments)
    self._depth = np.arange(dmin, dmax + 0.001)
    self._age_ensemble = np.array([self.agedepth(d=dx) for dx in self.depth])
Fit MCMC AgeDepthModel
113
def _netstat_route_sunos(): ret = [] cmd = out = __salt__[](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() ret.append({ : , : comps[0], : comps[1], : , : comps[2], : comps[5] if len(comps) >= 6 else }) cmd = out = __salt__[](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() ret.append({ : , : comps[0], : comps[1], : , : comps[2], : comps[5] if len(comps) >= 6 else }) return ret
Return netstat routing information for SunOS
114
def read_snapshots(path, comments=" nodetype=None, timestamptype=None, encoding=, keys=False): ids = None lines = (line.decode(encoding) for line in path) if keys: ids = read_ids(path.name, delimiter=delimiter, timestamptype=timestamptype) return parse_snapshots(lines, comments=comments, directed=directed, delimiter=delimiter, nodetype=nodetype, timestamptype=timestamptype, keys=ids)
Read a DyNetx graph from snapshot graph list format. Parameters ---------- path : basestring The file to read from delimiter : character Column delimiter
115
def request(self, method, url, **kwargs): resp = super(CookieSession, self).request(method, url, **kwargs) if not self._auto_renew: return resp is_expired = any(( resp.status_code == 403 and response_to_json_dict(resp).get() == , resp.status_code == 401 )) if is_expired: self.login() resp = super(CookieSession, self).request(method, url, **kwargs) return resp
Overrides ``requests.Session.request`` to renew the cookie and then retry the original request (if required).
116
def _iter_restrict(self, zeros, ones):
    inputs = list(self.inputs)
    unmapped = dict()
    for i, v in enumerate(self.inputs):
        if v in zeros:
            inputs[i] = 0
        elif v in ones:
            inputs[i] = 1
        else:
            unmapped[v] = i
    vs = sorted(unmapped.keys())
    for num in range(1 << len(vs)):
        for v, val in boolfunc.num2point(num, vs).items():
            inputs[unmapped[v]] = val
        yield sum((val << i) for i, val in enumerate(inputs))
Iterate through indices of all table entries that vary.
117
def _setup_states(state_definitions, prev=()):
    states = list(prev)
    for state_def in state_definitions:
        if len(state_def) != 2:
            raise TypeError(
                "The attribute of a workflow should be "
                "a two-tuple of strings; got %r instead." % (state_def,)
            )
        name, title = state_def
        state = State(name, title)
        if any(st.name == name for st in states):
            states = [state if st.name == name else st for st in states]
        else:
            states.append(state)
    return StateList(states)
Create a StateList object from a 'states' Workflow attribute.
118
def resolve(self, geoid, id_only=False): level, code, validity = geoids.parse(geoid) qs = self(level=level, code=code) if id_only: qs = qs.only() if validity == : result = qs.latest() else: result = qs.valid_at(validity).first() return result.id if id_only and result else result
Resolve a GeoZone given a GeoID. The start date is resolved from the given GeoID: i.e. it finds a zone valid at the GeoID's validity, resolves the `latest` alias, or uses `latest` when no validity is given. If `id_only` is True, the result will be the resolved GeoID instead of the resolved zone.
119
def handle_no_document(self, item_session: ItemSession) -> Actions:
    self._waiter.reset()
    action = self.handle_response(item_session)
    if action == Actions.NORMAL:
        item_session.set_status(Status.skipped)
    return action
Callback for successful responses containing no useful document. Returns: A value from :class:`.hook.Actions`.
120
def _constrain_pan(self):
    if self.xmin is not None and self.xmax is not None:
        p0 = self.xmin + 1. / self._zoom[0]
        p1 = self.xmax - 1. / self._zoom[0]
        p0, p1 = min(p0, p1), max(p0, p1)
        self._pan[0] = np.clip(self._pan[0], p0, p1)
    if self.ymin is not None and self.ymax is not None:
        p0 = self.ymin + 1. / self._zoom[1]
        p1 = self.ymax - 1. / self._zoom[1]
        p0, p1 = min(p0, p1), max(p0, p1)
        self._pan[1] = np.clip(self._pan[1], p0, p1)
Constrain bounding box.
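A worked numeric sketch of the clamp above, assuming xmin=-1, xmax=1 and zoom=2 on the x axis:

    import numpy as np

    # p0 = -1 + 1/2 = -0.5 and p1 = 1 - 1/2 = 0.5, so pan stays in [-0.5, 0.5]
    np.clip(0.8, -0.5, 0.5)   # -> 0.5 (clamped)
    np.clip(-0.2, -0.5, 0.5)  # -> -0.2 (already inside the bounds)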
121
def predict(self, t):
    t = np.asarray(t)
    return self._predict(np.ravel(t)).reshape(t.shape)
Predict the smoothed function value at time t Parameters ---------- t : array_like Times at which to predict the result Returns ------- y : ndarray Smoothed values at time t
122
def send(self, value):
    if not self.block and self._stdin is not None:
        self.writer.write("{}\n".format(value))
        return self
    else:
        raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
Send text to stdin. Can only be used on non blocking commands Args: value (str): the text to write on stdin Raises: TypeError: If command is blocking Returns: ShellCommand: return this ShellCommand instance for chaining
123
def kth_to_last_dict(head, k):
    if not (head and k > -1):
        return False
    d = dict()
    count = 0
    while head:
        d[count] = head
        head = head.next
        count += 1
    return len(d)-k in d and d[len(d)-k]
This is a brute force method where we keep a dict the size of the list, then check it for the value we need. If the key is not in the dict, the `and` statement will short-circuit and return False.
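A minimal usage sketch, assuming the usual singly linked node type with a `next` pointer (the `Node` class here is hypothetical):

    class Node:  # hypothetical node type assumed by the function
        def __init__(self, val, next=None):
            self.val, self.next = val, next

    head = Node(1, Node(2, Node(3, Node(4))))
    kth_to_last_dict(head, 1).val  # -> 4, the last node
    kth_to_last_dict(head, 2).val  # -> 3, second to last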
124
def listunspent(self, address: str) -> list: try: return cast(dict, self.ext_fetch( + address))[] except KeyError: raise InsufficientFunds()
Returns unspent transactions for given address.
125
def get_next_event(event, now):
    year = now.year
    month = now.month
    day = now.day
    e_day = event[0].l_start_date.day
    e_end_day = event[0].l_end_date.day
    good_today = True if event[0].l_start_date.time() >= now.time() else False
    if event[0].starts_same_year_month_as(year, month) and \
            e_day <= now.day <= e_end_day:
        occurrences = CountHandler(year, month, event).get_count()
        future_dates = (x for x in occurrences if x >= now.day)
        day = min(future_dates, key=lambda x: abs(x - now.day))
    else:
        e_year = event[0].l_start_date.year
        e_month = event[0].l_start_date.month
        if date(e_year, e_month, e_day) > date(now.year, now.month, now.day):
            return check_weekday(year, month, day)
    return year, month, day
Returns the next occurrence of a given event, relative to 'now'. The 'event' arg should be an iterable containing one element, namely the event we'd like to find the occurrence of. The reason for this is b/c the get_count() function of CountHandler, which this func makes use of, expects an iterable. CHANGED: The 'now' arg must be an instance of datetime.datetime() to allow time comparison (used to accept datetime.date() as well)
126
def _set_active_policy(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=active_policy.active_policy, is_container=, presence=False, yang_name="active-policy", rest_name="active-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None, u: None}}, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "container", : , }) self.__active_policy = t if hasattr(self, ): self._set()
Setter method for active_policy, mapped from YANG variable /rbridge_id/secpolicy/active_policy (container) If this variable is read-only (config: false) in the source YANG file, then _set_active_policy is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_active_policy() directly. YANG Description: Set the Active policy
127
def ellipse(self, x,y,w,h,style=): "Draw a ellipse" if(style==): op= elif(style== or style==): op= else: op= cx = x + w/2.0 cy = y + h/2.0 rx = w/2.0 ry = h/2.0 lx = 4.0/3.0*(math.sqrt(2)-1)*rx ly = 4.0/3.0*(math.sqrt(2)-1)*ry self._out(sprintf(, (cx+rx)*self.k, (self.h-cy)*self.k, (cx+rx)*self.k, (self.h-(cy-ly))*self.k, (cx+lx)*self.k, (self.h-(cy-ry))*self.k, cx*self.k, (self.h-(cy-ry))*self.k)) self._out(sprintf(, (cx-lx)*self.k, (self.h-(cy-ry))*self.k, (cx-rx)*self.k, (self.h-(cy-ly))*self.k, (cx-rx)*self.k, (self.h-cy)*self.k)) self._out(sprintf(, (cx-rx)*self.k, (self.h-(cy+ly))*self.k, (cx-lx)*self.k, (self.h-(cy+ry))*self.k, cx*self.k, (self.h-(cy+ry))*self.k)) self._out(sprintf(, (cx+lx)*self.k, (self.h-(cy+ry))*self.k, (cx+rx)*self.k, (self.h-(cy+ly))*self.k, (cx+rx)*self.k, (self.h-cy)*self.k, op))
Draw an ellipse
128
def _construct_from_json(self, rec): self.delete() for required_key in [, ]: setattr(self, required_key, rec[required_key]) for job_json in rec.get(, []): self._add_job_from_spec(job_json) self.commit(cascade=True)
Construct this Dagobah instance from a JSON document.
129
def _sync_string_to(bin_or_str, string):
    if isinstance(string, type(bin_or_str)):
        return string
    elif isinstance(string, binary_type):
        return string.decode(DEFAULT_ENCODING)
    else:
        return string.encode(DEFAULT_ENCODING)
Python 3 compliance: ensure two strings are the same type (unicode or binary)
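A behavioural sketch, assuming `binary_type` is `bytes` and `DEFAULT_ENCODING` is 'utf-8' (the usual six-style definitions; both are assumptions here):

    binary_type = bytes          # assumed, as in six
    DEFAULT_ENCODING = 'utf-8'   # assumed

    _sync_string_to(b'template', 'value')  # -> b'value' (matches binary template)
    _sync_string_to('template', b'value')  # -> 'value'  (matches text template)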
130
def get_grades(self):
    # the map key literal was stripped in extraction; 'grades' is implied
    # by the method name and the GradeList return type
    return GradeList(
        self._my_map['grades'],
        runtime=self._runtime,
        proxy=self._proxy)
Gets the grades in this system ranked from highest to lowest. return: (osid.grading.GradeList) - the list of grades raise: IllegalState - ``is_based_on_grades()`` is ``false`` raise: OperationFailed - unable to complete request *compliance: mandatory -- This method must be implemented.*
131
def _do_search(self): if self._results_cache is None: response = self.raw() results = self.to_python(response.get(, {}).get(, [])) self._results_cache = DictSearchResults( self.type, response, results, None) return self._results_cache
Perform the mlt call, then convert that raw format into a SearchResults instance and return it.
132
def add_cron(self, name, minute, hour, mday, month, wday, who, command, env=None):
    raise NotImplementedError
Add an entry to the system crontab.
133
def get_tc_api(self, host, headers=None, cert=None, logger=None):
    if logger is None and self.logger:
        logger = self.logger
    return Api(host, headers, cert, logger)
Gets HttpApi wrapped into a neat little package that raises TestStepFail if expected status code is not returned by the server. Default setting for expected status code is 200. Set expected to None when calling methods to ignore the expected status code parameter or set raiseException = False to disable raising the exception.
134
def torecarray(*args, **kwargs):
    import numpy as np
    return toarray(*args, **kwargs).view(np.recarray)
Convenient shorthand for ``toarray(*args, **kwargs).view(np.recarray)``.
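A short usage sketch in the petl style this function comes from; the recarray view is what makes columns addressable as attributes:

    table = [('foo', 'bar'), ('a', 1), ('b', 2)]
    rec = torecarray(table)
    rec.foo  # -> array(['a', 'b'], ...): column access by attribute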
135
def shader_substring(body, stack_frame=1):
    line_count = len(body.splitlines(True))
    line_number = inspect.stack()[stack_frame][2] + 1 - line_count
    # the format literal was stripped in extraction; '#line %d\n%s' is
    # implied by the docstring's "#line number declaration"
    return '#line %d\n%s' % (line_number, textwrap.dedent(body))
Call this method from a function that defines a literal shader string as the "body" argument. Dresses up a shader string in two ways: 1) Insert #line number declaration 2) un-indents The line number information can help debug glsl compile errors. The unindenting allows you to type the shader code at a pleasing indent level in your python method, while still creating an unindented GLSL string at the end.
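A usage sketch: define the GLSL literal inside a function, and the returned string carries a #line directive pointing back at the Python source line (useful when the GL driver reports compile errors):

    def vertex_shader_source():
        return shader_substring("""
            #version 450
            void main() {
                gl_Position = vec4(0.0);
            }
            """)

    print(vertex_shader_source())  # begins with a '#line N' directive, then unindented GLSL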
136
def schedule_task(self, task_id):
    task = self.registry.get(task_id)
    job_args = self._build_job_arguments(task)
    archiving_cfg = task.archiving_cfg
    fetch_from_archive = False if not archiving_cfg else archiving_cfg.fetch_from_archive
    queue = Q_ARCHIVE_JOBS if fetch_from_archive else Q_CREATION_JOBS
    job_id = self._scheduler.schedule_job_task(queue, task.task_id,
                                               job_args, delay=0)
    logger.info("Job %s scheduled", job_id)  # log text lost in extraction; gist reconstructed
    return job_id
Schedule a task. :param task_id: identifier of the task to schedule :raises NotFoundError: raised when the requested task is not found in the registry
137
def parse_row(self, row, row_index, cell_mode=CellMode.cooked):
    return [self.parse_cell(cell, (col_index, row_index), cell_mode)
            for col_index, cell in enumerate(row)]
Parse a row according to the given cell_mode.
138
def timeseries(X, **kwargs):
    # the rcParams key literal was stripped; 'figure.figsize' is implied by
    # the [0]/[1] width/height indexing
    pl.figure(figsize=(2*rcParams['figure.figsize'][0],
                       rcParams['figure.figsize'][1]),
              subplotpars=sppars(left=0.12, right=0.98, bottom=0.13))
    timeseries_subplot(X, **kwargs)
Plot X. See timeseries_subplot.
139
def parse_filename_meta(filename): common_pattern = "_%s_%s" % ( "(?P<product>[a-zA-Z]{3}[a-zA-Z]?-[a-zA-Z0-9]{2}[a-zA-Z0-9]?-[a-zA-Z0-9]{4}[a-zA-Z0-9]?)", "(?P<platform>[gG][1-9]{2})" ) patterns = { "l2_pattern": re.compile("%s_s(?P<start>[0-9]{8}T[0-9]{6})Z_e(?P<end>[0-9]{8}T[0-9]{6})Z" % common_pattern), "l1b_pattern": re.compile( % common_pattern), "dayfile_pattern": re.compile("%s_d(?P<start>[0-9]{8})" % common_pattern), "monthfile_pattern": re.compile("%s_m(?P<start>[0-9]{6})" % common_pattern), "yearfile_pattern": re.compile("%s_y(?P<start>[0-9]{4})" % common_pattern), } match, dt_start, dt_end = None, None, None for pat_type, pat in patterns.items(): match = pat.search(filename) if match is not None: if pat_type == "l2_pattern": dt_start = datetime.strptime(match.group("start"), ) dt_end = datetime.strptime(match.group("end"), ) elif pat_type == "l1b_pattern": dt_start = datetime.strptime(match.group("start"), ) dt_end = datetime.strptime(match.group("end"), ) elif pat_type == "dayfile_pattern": dt_start = datetime.strptime(match.group("start"), "%Y%m%d") dt_end = dt_start + timedelta(hours=24) elif pat_type == "monthfile_pattern": dt_start = datetime.strptime(match.group("start"), "%Y%m") dt_end = datetime(dt_start.year, dt_start.month + 1, 1) elif pat_type == "yearfile_pattern": dt_start = datetime.strptime(match.group("start"), "%Y") dt_end = datetime(dt_start.year + 1, 1, 1) break if match is None: if "NCEI" in filename and ".fits" in filename: dt_start = datetime.strptime("T".join(filename.split("_")[4:6]), "%Y%m%dT%H%M%S") dt_end = dt_start angstroms = int(filename.split("_")[2]) atom = "Fe" if angstroms != 304 else "He" product = "SUVI-L1b-{}{}".format(atom, angstroms) return filename, dt_start, dt_end, "g16", product else: raise ValueError("Timestamps not detected in filename: %s" % filename) else: return filename, dt_start, dt_end, match.group("platform"), match.group("product")
taken from suvi code by vhsu Parse the metadata from a product filename, either L1b or l2. - file start - file end - platform - product :param filename: string filename of product :return: (start datetime, end datetime, platform)
140
def _pi_id(self):
    pi_rev_code = self._pi_rev_code()
    if pi_rev_code:
        for model, codes in _PI_REV_CODES.items():
            if pi_rev_code in codes:
                return model
    return None
Try to detect id of a Raspberry Pi.
141
def format_seq(self, outstream=None, linewidth=70):
    if linewidth == 0 or len(self.seq) <= linewidth:
        if outstream is None:
            return self.seq
        else:
            print(self.seq, file=outstream)
            return
    i = 0
    seq = ''
    while i < len(self.seq):
        if outstream is None:
            seq += self.seq[i:i+linewidth] + '\n'
        else:
            print(self.seq[i:i+linewidth], file=outstream)
        i += linewidth
    if outstream is None:
        return seq
Print a sequence in a readable format. :param outstream: if `None`, formatted sequence is returned as a string; otherwise, it is treated as a file-like object and the formatted sequence is printed to the outstream :param linewidth: width for wrapping sequences over multiple lines; set to 0 for no wrapping
142
def html_visit_inheritance_diagram(self, node): graph = node[] graph_hash = get_graph_hash(node) name = % graph_hash graphviz_output_format = self.builder.env.config.graphviz_output_format.upper() current_filename = self.builder.current_docname + self.builder.out_suffix urls = {} for child in node: if child.get() is not None: if graphviz_output_format == : urls[child[]] = os.path.join("..", child.get()) else: urls[child[]] = child.get() elif child.get() is not None: if graphviz_output_format == : urls[child[]] = os.path.join(, current_filename + + child.get()) else: urls[child[]] = + child.get() dotcode = graph.generate_dot(name, urls, env=self.builder.env) render_dot_html( self, node, dotcode, {}, , , alt= + node[], link_to_svg=) raise nodes.SkipNode
Output the graph for HTML. This will insert a PNG with clickable image map.
143
def remove_udp_port(self, port):
    if port in self._used_udp_ports:
        self._used_udp_ports.remove(port)
Removes an associated UDP port number from this project. :param port: UDP port number
144
def timeout_selecting(self): logger.debug(, self.current_state) if len(self.offers) >= MAX_OFFERS_COLLECTED: logger.debug( ) raise self.REQUESTING() if self.discover_attempts >= MAX_ATTEMPTS_DISCOVER: logger.debug( , MAX_ATTEMPTS_DISCOVER, self.discover_attempts) if len(self.offers) <= 0: logger.debug( ) raise self.ERROR() logger.debug( ) raise self.REQUESTING() logger.debug( ) raise self.SELECTING()
Timeout of selecting on SELECTING state. Not specified in [:rfc:`7844`]. See comments in :func:`dhcpcapfsm.DHCPCAPFSM.timeout_request`.
145
def delete(self, docids):
    self.check_session()
    result = self.session.delete(docids)
    if self.autosession:
        self.commit()
    return result
Delete documents from the current session.
146
def _build_dictionary(self, results):
    foreign = self._foreign_key
    dictionary = {}
    for result in results:
        key = getattr(result.pivot, foreign)
        if key not in dictionary:
            dictionary[key] = []
        dictionary[key].append(result)
    return dictionary
Build model dictionary keyed by the relation's foreign key. :param results: The results :type results: Collection :rtype: dict
147
def normalize_surfs(in_file, transform_file, newpath=None): img = nb.load(in_file) transform = load_transform(transform_file) pointset = img.get_arrays_from_intent()[0] coords = pointset.data.T c_ras_keys = (, , ) ras = np.array([[float(pointset.metadata[key])] for key in c_ras_keys]) ones = np.ones((1, coords.shape[1]), dtype=coords.dtype) pointset.data = transform.dot(np.vstack((coords + ras, ones)))[:3].T.astype(coords.dtype) secondary = nb.gifti.GiftiNVPairs(, ) geom_type = nb.gifti.GiftiNVPairs(, ) has_ass = has_geo = False for nvpair in pointset.meta.data: if nvpair.name in c_ras_keys: nvpair.value = elif nvpair.name == secondary.name: has_ass = True elif nvpair.name == geom_type.name: has_geo = True fname = os.path.basename(in_file) if in fname.lower() or in fname.lower(): if not has_ass: pointset.meta.data.insert(1, secondary) if not has_geo: pointset.meta.data.insert(2, geom_type) if newpath is not None: newpath = os.getcwd() out_file = os.path.join(newpath, fname) img.to_filename(out_file) return out_file
Re-center GIFTI coordinates to fit align to native T1 space For midthickness surfaces, add MidThickness metadata Coordinate update based on: https://github.com/Washington-University/workbench/blob/1b79e56/src/Algorithms/AlgorithmSurfaceApplyAffine.cxx#L73-L91 and https://github.com/Washington-University/Pipelines/blob/ae69b9a/PostFreeSurfer/scripts/FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L147
148
def calculate_average_scores_on_graph(
        graph: BELGraph,
        key: Optional[str] = None,
        tag: Optional[str] = None,
        default_score: Optional[float] = None,
        runs: Optional[int] = None,
        use_tqdm: bool = False,
):
    subgraphs = generate_bioprocess_mechanisms(graph, key=key)
    scores = calculate_average_scores_on_subgraphs(
        subgraphs,
        key=key,
        tag=tag,
        default_score=default_score,
        runs=runs,
        use_tqdm=use_tqdm
    )
    return scores
Calculate the scores over all biological processes in the sub-graph. As an implementation, it simply computes the sub-graphs then calls :func:`calculate_average_scores_on_subgraphs` as described in that function's documentation. :param graph: A BEL graph with heats already on the nodes :param key: The key in the node data dictionary representing the experimental data. Defaults to :data:`pybel_tools.constants.WEIGHT`. :param tag: The key for the nodes' data dictionaries where the scores will be put. Defaults to 'score' :param default_score: The initial score for all nodes. This number can go up or down. :param runs: The number of times to run the heat diffusion workflow. Defaults to 100. :param use_tqdm: Should there be a progress bar for runners? :return: A dictionary of {pybel node tuple: results tuple} :rtype: dict[tuple, tuple] Suggested usage with :mod:`pandas`: >>> import pandas as pd >>> from pybel_tools.analysis.heat import calculate_average_scores_on_graph >>> graph = ... # load graph and data >>> scores = calculate_average_scores_on_graph(graph) >>> pd.DataFrame.from_items(scores.items(), orient='index', columns=RESULT_LABELS)
149
def add_suffix(fullname, suffix):
    name, ext = os.path.splitext(fullname)
    # the separator literal between name and suffix was stripped in
    # extraction; '_' is an assumption
    return name + '_' + suffix + ext
Add suffix to a full file name
150
def get_num_processes(): cpu_count = multiprocessing.cpu_count() if config.NUMBER_OF_CORES == 0: raise ValueError( ) if config.NUMBER_OF_CORES > cpu_count: log.info(, config.NUMBER_OF_CORES, cpu_count) return cpu_count if config.NUMBER_OF_CORES < 0: num = cpu_count + config.NUMBER_OF_CORES + 1 if num <= 0: raise ValueError( .format(num, cpu_count)) return num return config.NUMBER_OF_CORES
Return the number of processes to use in parallel.
151
def destination(self, point, bearing, distance=None):
    point = Point(point)
    lat1 = units.radians(degrees=point.latitude)
    lng1 = units.radians(degrees=point.longitude)
    bearing = units.radians(degrees=bearing)
    if distance is None:
        distance = self
    if isinstance(distance, Distance):
        distance = distance.kilometers
    d_div_r = float(distance) / self.RADIUS
    lat2 = asin(
        sin(lat1) * cos(d_div_r) +
        cos(lat1) * sin(d_div_r) * cos(bearing)
    )
    lng2 = lng1 + atan2(
        sin(bearing) * sin(d_div_r) * cos(lat1),
        cos(d_div_r) - sin(lat1) * sin(lat2)
    )
    return Point(units.degrees(radians=lat2), units.degrees(radians=lng2))
Calculate the destination point reached by travelling the given distance from `point` along `bearing` on a great circle. (Original docstring was only "TODO docs.")
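A usage sketch of the geopy-style API this method belongs to (names assumed to match geopy.distance):

    from geopy.distance import great_circle

    # travel 100 km due east from (40N, 75W); returns a new Point
    great_circle(kilometers=100).destination((40.0, -75.0), bearing=90)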
152
def eintr_retry(exc_type, f, *args, **kwargs):
    while True:
        try:
            return f(*args, **kwargs)
        except exc_type as exc:
            if exc.errno != EINTR:
                raise
        else:
            break
Calls a function. If an error of the given exception type with interrupted system call (EINTR) occurs calls the function again.
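A usage sketch: wrapping a blocking syscall so EINTR retries transparently (the file path is purely illustrative):

    import os
    from errno import EINTR

    fd = os.open('/etc/hostname', os.O_RDONLY)      # illustrative file
    data = eintr_retry(OSError, os.read, fd, 4096)  # re-calls os.read on EINTR
    os.close(fd)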
153
def _get_bonds(self, mol):
    num_atoms = len(mol)
    if self.ignore_ionic_bond:
        covalent_atoms = [i for i in range(num_atoms)
                          if mol.species[i].symbol not in self.ionic_element_list]
    else:
        covalent_atoms = list(range(num_atoms))
    all_pairs = list(itertools.combinations(covalent_atoms, 2))
    pair_dists = [mol.get_distance(*p) for p in all_pairs]
    elements = mol.composition.as_dict().keys()
    unavailable_elements = list(set(elements) -
                                set(self.covalent_radius.keys()))
    if len(unavailable_elements) > 0:
        raise ValueError("The covalent radius for element {} is not "
                         "available".format(unavailable_elements))
    bond_13 = self.get_13_bonds(self.priority_bonds)
    max_length = [(self.covalent_radius[mol.sites[p[0]].specie.symbol] +
                   self.covalent_radius[mol.sites[p[1]].specie.symbol]) *
                  (1 + (self.priority_cap if p in self.priority_bonds
                        else (self.bond_length_cap if p not in bond_13
                              else self.bond_13_cap))) *
                  (0.1 if (self.ignore_halogen_self_bond and
                           p not in self.priority_bonds and
                           mol.sites[p[0]].specie.symbol in self.halogen_list and
                           mol.sites[p[1]].specie.symbol in self.halogen_list)
                   else 1.0)
                  for p in all_pairs]
    bonds = [bond for bond, dist, cap in zip(all_pairs, pair_dists, max_length)
             if dist <= cap]
    return bonds
Find all the bonds in a molecule. Args: mol: the molecule, a pymatgen Molecule object. Returns: List of tuples. Each tuple corresponds to a bond represented by the ids of the two end atoms.
154
def now(self, when=None):
    if when is None:
        when = _TaskManager().get_time()
    tup = time.localtime(when)
    self.value = (tup[0]-1900, tup[1], tup[2], tup[6] + 1)
    return self
Set the current value to the correct tuple based on the seconds since the epoch. If 'when' is not provided, get the current time from the task manager.
155
def profile(request, status=200): if request.method == : if request.GET.get("username", False): try: user_profile = User.objects.get(username=request.GET.get("username"), userprofile__public=True).userprofile except ObjectDoesNotExist: raise Http404("user not found or have not public profile") else: user_id = get_user_id(request) if get_config(, , default=True) and not is_user_id_overridden(request): migrated_user = migrate_google_openid_user(request.user) if migrated_user is not None: auth.logout(request) migrated_user.backend = auth.login(request, migrated_user) user_profile = get_object_or_404(UserProfile, user_id=user_id) return render_json( request, user_profile, status=status, template=, help_text=profile.__doc__) elif request.method == : with transaction.atomic(): to_save = json_body(request.body.decode("utf-8")) user_id = get_user_id(request) user_profile = get_object_or_404(UserProfile, user_id=user_id) user = to_save.get(, None) if in to_save: user_profile.send_emails = bool(to_save[]) if in to_save: user_profile.public = bool(to_save[]) if user: error = _save_user(request, user, new=False) if error: return render_json(request, error, template=, status=400) if in to_save: user_profile.save_properties(to_save[]) user_profile.save() request.method = "GET" return profile(request, status=202) else: return HttpResponseBadRequest("method %s is not allowed".format(request.method))
Get the user's profile. If the user has no assigned profile, HTTP 404 is returned. Make a POST request to modify the user's profile. GET parameters: html: turn on the HTML version of the API username: username of user (only for users with a public profile) stats: attach additional user statistics POST parameters (JSON): send_emails: switcher turning on sending e-mails to user public: switcher making the user's profile publicly available user: password: user's password password_check: user's password again to check it first_name (optional): user's first name last_name (optional): user's last name
156
def visit(self, node):
    for pattern, replace in know_pattern:
        check = Check(node, dict())
        if check.visit(pattern):
            node = PlaceholderReplace(check.placeholders).visit(replace())
            self.update = True
    return super(PatternTransform, self).visit(node)
Try to replace if node match the given pattern or keep going.
157
def tf_initialize(self, x_init, b):
    if x_init is None:
        x_init = [tf.zeros(shape=util.shape(t)) for t in b]
    initial_args = super(ConjugateGradient, self).tf_initialize(x_init)
    conjugate = residual = [t - fx for t, fx in zip(b, self.fn_x(x_init))]
    squared_residual = tf.add_n(inputs=[tf.reduce_sum(input_tensor=(res * res))
                                        for res in residual])
    return initial_args + (conjugate, residual, squared_residual)
Initialization step preparing the arguments for the first iteration of the loop body: $x_0, 0, p_0, r_0, r_0^2$. Args: x_init: Initial solution guess $x_0$, zero vector if None. b: The right-hand side $b$ of the system of linear equations. Returns: Initial arguments for tf_step.
158
def gen_locale(locale, **kwargs): **en_IE.UTF-8 UTF-8 on_debian = __grains__.get() == on_ubuntu = __grains__.get() == on_gentoo = __grains__.get() == on_suse = __grains__.get() == on_solaris = __grains__.get() == if on_solaris: return locale in __salt__[]() locale_info = salt.utils.locales.split_locale(locale) locale_search_str = .format(locale_info[], locale_info[]) if not locale_info[] and not on_ubuntu: locale_info[] = locale_info[] locale = salt.utils.locales.join_locale(locale_info) if on_debian or on_gentoo: search = valid = __salt__[](search, .format(locale), flags=re.MULTILINE) else: if on_suse: search = else: search = try: valid = locale_search_str in os.listdir(search) except OSError as ex: log.error(ex) raise CommandExecutionError( "Locale \"{0}\" is not available.".format(locale)) if not valid: log.error( , locale, search) return False if os.path.exists(): __salt__[]( , r.format(locale), .format(locale), append_if_not_found=True ) elif on_ubuntu: __salt__[]( .format(locale_info[]) ) __salt__[]( .format(locale_info[]), locale, locale, append_if_not_found=True ) if salt.utils.path.which(): cmd = [] if on_gentoo: cmd.append() if on_ubuntu: cmd.append(salt.utils.locales.normalize_locale(locale)) else: cmd.append(locale) elif salt.utils.path.which(): cmd = [, , , locale_search_str, , locale_info[], .format(locale_search_str, locale_info[]), kwargs.get(, False) and or ] else: raise CommandExecutionError( ) res = __salt__[](cmd) if res[]: log.error(res[]) if kwargs.get(): return res else: return res[] == 0
Generate a locale. Options: .. versionadded:: 2014.7.0 :param locale: Any locale listed in /usr/share/i18n/locales or /usr/share/i18n/SUPPORTED for Debian and Gentoo based distributions, which require the charmap to be specified as part of the locale when generating it. verbose Show extra warnings about errors that are normally ignored. CLI Example: .. code-block:: bash salt '*' locale.gen_locale en_US.UTF-8 salt '*' locale.gen_locale 'en_IE.UTF-8 UTF-8' # Debian/Gentoo only
159
def _set_collector(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("collector_name",collector.collector, yang_name="collector", rest_name="collector", parent=self, is_container=, user_ordered=False, path_helper=self._path_helper, yang_keys=, extensions={u: {u: None, u: None, u: u, u: u}}), is_container=, yang_name="collector", rest_name="collector", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: None, u: None, u: u, u: u}}, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "list", : , }) self.__collector = t if hasattr(self, ): self._set()
Setter method for collector, mapped from YANG variable /telemetry/collector (list) If this variable is read-only (config: false) in the source YANG file, then _set_collector is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_collector() directly.
160
def _get_dvs_infrastructure_traffic_resources(dvs_name, dvs_infra_traffic_ress): log.trace(%s\ , dvs_name) res_dicts = [] for res in dvs_infra_traffic_ress: res_dict = {: res.key, : res.allocationInfo.limit, : res.allocationInfo.reservation} if res.allocationInfo.shares: res_dict.update({: res.allocationInfo.shares.shares, : res.allocationInfo.shares.level}) res_dicts.append(res_dict) return res_dicts
Returns a list of dict representations of the DVS infrastructure traffic resource dvs_name The name of the DVS dvs_infra_traffic_ress The DVS infrastructure traffic resources
161
def get_user_groups(name, sid=False): if name == : groups = [name] else: groups = win32net.NetUserGetLocalGroups(None, name) if not sid: return groups ret_groups = set() for group in groups: ret_groups.add(get_sid_from_name(group)) return ret_groups
Get the groups to which a user belongs Args: name (str): The user name to query sid (bool): True will return a list of SIDs, False will return a list of group names Returns: list: A list of group names or sids
162
def parse_text(document, container, element): "Parse text element." txt = None alternate = element.find(_name()) if alternate is not None: parse_alternate(document, container, alternate) br = element.find(_name()) if br is not None: if _name() in br.attrib: _type = br.attrib[_name()] brk = doc.Break(_type) else: brk = doc.Break() container.elements.append(brk) t = element.find(_name()) if t is not None: txt = doc.Text(t.text) txt.parent = container container.elements.append(txt) rpr = element.find(_name()) if rpr is not None: parse_previous_properties(document, txt, rpr) for r in element.findall(_name()): parse_text(document, container, r) foot = element.find(_name()) if foot is not None: parse_footnote(document, container, foot) end = element.find(_name()) if end is not None: parse_endnote(document, container, end) sym = element.find(_name()) if sym is not None: _font = sym.attrib[_name()] _char = sym.attrib[_name()] container.elements.append(doc.Symbol(font=_font, character=_char)) image = element.find(_name()) if image is not None: parse_drawing(document, container, image) refe = element.find(_name()) if refe is not None: _m = doc.Comment(refe.attrib[_name()], ) container.elements.append(_m) return
Parse text element.
163
def response(self, msgid, error, result):
    if error:
        self.requests[msgid].errback(Exception(str(error)))
    else:
        self.requests[msgid].callback(result)
    del self.requests[msgid]
Handle a results message given to the proxy by the protocol object.
164
def load(self, args):
    self._queue.append(tc.CMD_LOAD)
    self._string += struct.pack("!BiB", 0,
                                1 + 4 + 1 + 1 + 4 + sum(map(len, args)) + 4 * len(args),
                                tc.CMD_LOAD)
    self._packStringList(args)
    self._sendExact()
Load a simulation from the given arguments.
165
def reverseCommit(self):
    col = self.cursorPos[1]
    for ii, text in enumerate(self.insertedText):
        line = ii + self.cursorPos[0]
        self.qteWidget.setSelection(line, col, line, col + len(text))
        self.baseClass.removeSelectedText()
    self.qteWidget.setCursorPosition(*self.cursorPos)
Re-insert the previously deleted line.
166
def call_binop(self, context, operator, left, right):
    return self.binop_table[operator](left, right)
For intercepted binary operator calls (:meth:`intercepted_binops`) this function is executed instead of the builtin operator. This can be used to fine tune the behavior of certain operators. .. versionadded:: 2.6
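This is the Jinja2 sandbox hook; the classic use (adapted from the Jinja documentation) is disabling an operator by overriding it in a SandboxedEnvironment subclass:

    from jinja2.sandbox import SandboxedEnvironment

    class NoPowSandbox(SandboxedEnvironment):
        # only intercepted operators are routed through call_binop
        intercepted_binops = frozenset(['**'])

        def call_binop(self, context, operator, left, right):
            if operator == '**':
                return self.undefined('the power operator is unavailable')
            return super().call_binop(context, operator, left, right)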
167
def consume_message(self, header, message):
    logmessage = {
        "time": (time.time() % 1000) * 1000,
        "header": "",
        "message": message,
    }
    if header:
        logmessage["header"] = (
            json.dumps(header, indent=2) + "\n" + "----------------" + "\n"
        )
    if isinstance(message, dict):
        logmessage["message"] = (
            json.dumps(message, indent=2) + "\n" + "----------------" + "\n"
        )
    print("=== Consume ====\n{header}{message}".format(**logmessage))
    self.log.info("Received message @{time}".format(**logmessage))
    self.log.debug(
        "Received message @{time}\n{header}{message}".format(**logmessage)
    )
    time.sleep(0.1)
Consume a message
168
def start_adc(self, channel, gain=1, data_rate=None):
    # assert message literal lost in extraction; gist reconstructed
    assert 0 <= channel <= 3, 'Channel must be a value within 0-3!'
    # channel + 0x04 selects the single-ended mux setting for that channel
    return self._read(channel + 0x04, gain, data_rate,
                      ADS1x15_CONFIG_MODE_CONTINUOUS)
Start continuous ADC conversions on the specified channel (0-3). Will return an initial conversion result, then call the get_last_result() function to read the most recent conversion result. Call stop_adc() to stop conversions.
169
def parse(self, fp, headersonly=False):
    # stdlib email.parser.BytesParser wraps the binary stream this way;
    # the stripped literals are 'ascii' and 'surrogateescape'
    fp = TextIOWrapper(fp, encoding='ascii', errors='surrogateescape')
    with fp:
        return self.parser.parse(fp, headersonly)
Create a message structure from the data in a binary file. Reads all the data from the file and returns the root of the message structure. Optional headersonly is a flag specifying whether to stop parsing after reading the headers or not. The default is False, meaning it parses the entire contents of the file.
170
def track_dependency(self, name: str, data: str, type: str = None,
                     target: str = None, duration: int = None,
                     success: bool = None, result_code: str = None,
                     properties: Dict[str, object] = None,
                     measurements: Dict[str, object] = None,
                     dependency_id: str = None):
    raise NotImplementedError()
Sends a single dependency telemetry that was captured for the application. :param name: the name of the command initiated with this dependency call. Low cardinality value. Examples are stored procedure name and URL path template. :param data: the command initiated by this dependency call. Examples are SQL statement and HTTP URL with all query parameters. :param type: the dependency type name. Low cardinality value for logical grouping of dependencies and interpretation of other fields like commandName and resultCode. Examples are SQL, Azure table, and HTTP. (default to: None) :param target: the target site of a dependency call. Examples are server name, host address. (default to: None) :param duration: the number of milliseconds that this dependency call lasted. (defaults to: None) :param success: true if the dependency call ended in success, false otherwise. (defaults to: None) :param result_code: the result code of a dependency call. Examples are SQL error code and HTTP status code. (defaults to: None) :param properties: the set of custom properties the client wants attached to this data item. (defaults to: None) :param measurements: the set of custom measurements the client wants to attach to this data item. (defaults to: None) :param id: the id for this dependency call. If None, a new uuid will be generated. (defaults to: None)
171
def _check_infinite_flows(self, steps, flows=None):
    if flows is None:
        flows = []
    for step in steps.values():
        if "flow" in step:
            flow = step["flow"]
            if flow == "None":
                continue
            if flow in flows:
                raise FlowInfiniteLoopError(
                    "Infinite flows detected with flow {}".format(flow)
                )
            flows.append(flow)
            flow_config = self.project_config.get_flow(flow)
            self._check_infinite_flows(flow_config.steps, flows)
Recursively loop through the flow_config and check if there are any cycles. :param steps: Set of step definitions to loop through :param flows: Flows already visited. :return: None
172
def _OpenPathSpec(self, path_specification, ascii_codepage='cp1252'):
    # the default codepage literal was stripped; cp1252 is the usual
    # dfwinreg default
    if not path_specification:
        return None
    file_entry = self._file_system.GetFileEntryByPathSpec(path_specification)
    if file_entry is None:
        return None
    file_object = file_entry.GetFileObject()
    if file_object is None:
        return None
    registry_file = dfwinreg_regf.REGFWinRegistryFile(
        ascii_codepage=ascii_codepage)
    try:
        registry_file.Open(file_object)
    except IOError as exception:
        # warning text lost in extraction; gist reconstructed
        logger.warning(
            'Unable to open Windows Registry file with error: {0!s}'.format(
                exception))
        file_object.close()
        return None
    return registry_file
Opens the Windows Registry file specified by the path specification. Args: path_specification (dfvfs.PathSpec): path specification. ascii_codepage (Optional[str]): ASCII string codepage. Returns: WinRegistryFile: Windows Registry file or None.
173
def create_widget(self):
    d = self.declaration
    self.widget = CheckBox(self.get_context(), None,
                           d.style or "@attr/checkboxStyle")
Create the underlying widget.
174
def get_licenses(self):
    url_parameters = dict()
    return github.PaginatedList.PaginatedList(
        github.License.License,
        self.__requester,
        "/licenses",
        url_parameters
    )
:calls: `GET /licenses <https://developer.github.com/v3/licenses/#list-all-licenses>`_ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.License.License`
175
def get_object(self, item):
    if isinstance(item, six.string_types):
        item = self.object_manager.get(item)
    return item
Returns a StorageObject matching the specified item. If no such object exists, a NotFound exception is raised. If 'item' is not a string, that item is returned unchanged.
176
def _readconfig(): config = ConfigParser.SafeConfigParser() try: found = config.read(littlechef.CONFIGFILE) except ConfigParser.ParsingError as e: abort(str(e)) if not len(found): try: found = config.read([, ]) except ConfigParser.ParsingError as e: abort(str(e)) if len(found): print({0}\ .format(found[0], littlechef.CONFIGFILE)) else: abort( .format(littlechef.CONFIGFILE)) in_a_kitchen, missing = _check_appliances() missing_str = lambda m: .join(.join(m).rsplit(, 1)) if not in_a_kitchen: abort("Couldnfixfix new_kitchenuserinfossh-configYou need to define a "userinfo" section in the config file. Refer to the README for help (http://github.com/tobami/littlechef)t open the ssh-config file " "".format(env.ssh_config_path)) except Exception: abort("Couldn{0}connectiongatewayconnectionhttp_proxyconnectionhttps_proxyuserinforemove_data_bagsuserinfoencrypted_data_bag_secret{0}sshsudo_prefixuserinfouserYou need to define a user in the "userinfo" section of {0}. Refer to the README for help (http://github.com/tobami/littlechef)userinfopassworduserinfokeypair-fileYou need to define a password, keypair file, or ssh-config file in {0}s Chef Solo working directory for storing cookbooks, roles, etc. try: env.node_work_path = os.path.expanduser(config.get(, )) except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): env.node_work_path = littlechef.node_work_path else: if not env.node_work_path: abort() try: env.follow_symlinks = config.getboolean(, ) except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): env.follow_symlinks = False try: env.berksfile = config.get(, ) except (ConfigParser.NoSectionError, ConfigParser.NoOptionError) as e: env.berksfile = None else: try: env.berksfile_cookbooks_directory = config.get(, ) littlechef.cookbook_paths.append(env.berksfile_cookbooks_directory) except (ConfigParser.NoSectionError, ConfigParser.NoOptionError) as e: if env.berksfile: env.berksfile_cookbooks_directory = tempfile.mkdtemp() littlechef.cookbook_paths.append(env.berksfile_cookbooks_directory) else: env.berksfile_cookbooks_directory = None chef.ensure_berksfile_cookbooks_are_installed() try: env.sync_packages_dest_dir = config.get(, ) except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): env.sync_packages_dest_dir = None try: env.sync_packages_local_dir = config.get(, ) except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): env.sync_packages_local_dir = None try: env.autodeploy_chef = config.get(, ) or None except ConfigParser.NoOptionError: env.autodeploy_chef = None
Configures environment variables
177
def servicegroup_server_exists(sg_name, s_name, s_port=None, **connection_args):
    return _servicegroup_get_server(sg_name, s_name, s_port, **connection_args) is not None
Check if a server:port combination is a member of a servicegroup CLI Example: .. code-block:: bash salt '*' netscaler.servicegroup_server_exists 'serviceGroupName' 'serverName' 'serverPort'
178
def compute_effsize(x, y, paired=False, eftype='cohen'):
    # branch literals ('cohen', 'glass', 'r', 'cles') were stripped in
    # extraction; they are implied by the docstring and each branch's body
    if not _check_eftype(eftype):
        err = "Could not interpret input '{}'".format(eftype)
        raise ValueError(err)
    x = np.asarray(x)
    y = np.asarray(y)
    if x.size != y.size and paired:
        warnings.warn("x and y have unequal sizes. Switching to "
                      "paired == False.")
        paired = False
    x, y = remove_na(x, y, paired=paired)
    nx, ny = x.size, y.size
    if ny == 1:
        d = (x.mean() - y) / x.std(ddof=1)
        return d
    if eftype.lower() == 'glass':
        sd_control = np.min([x.std(ddof=1), y.std(ddof=1)])
        d = (x.mean() - y.mean()) / sd_control
        return d
    elif eftype.lower() == 'r':
        from scipy.stats import pearsonr
        r, _ = pearsonr(x, y)
        return r
    elif eftype.lower() == 'cles':
        diff = x[:, None] - y
        return max((diff < 0).sum(), (diff > 0).sum()) / diff.size
    else:
        # the remaining branch (Cohen d / Hedges g computation) was
        # truncated in the source row
        pass
Calculate effect size between two set of observations. Parameters ---------- x : np.array or list First set of observations. y : np.array or list Second set of observations. paired : boolean If True, uses Cohen d-avg formula to correct for repeated measurements (Cumming 2012) eftype : string Desired output effect size. Available methods are :: 'none' : no effect size 'cohen' : Unbiased Cohen d 'hedges' : Hedges g 'glass': Glass delta 'r' : correlation coefficient 'eta-square' : Eta-square 'odds-ratio' : Odds ratio 'AUC' : Area Under the Curve 'CLES' : Common language effect size Returns ------- ef : float Effect size See Also -------- convert_effsize : Conversion between effect sizes. compute_effsize_from_t : Convert a T-statistic to an effect size. Notes ----- Missing values are automatically removed from the data. If ``x`` and ``y`` are paired, the entire row is removed. If ``x`` and ``y`` are independent, the Cohen's d is: .. math:: d = \\frac{\\overline{X} - \\overline{Y}} {\\sqrt{\\frac{(n_{1} - 1)\\sigma_{1}^{2} + (n_{2} - 1) \\sigma_{2}^{2}}{n1 + n2 - 2}}} If ``x`` and ``y`` are paired, the Cohen :math:`d_{avg}` is computed: .. math:: d_{avg} = \\frac{\\overline{X} - \\overline{Y}} {0.5 * (\\sigma_1 + \\sigma_2)} The Cohen’s d is a biased estimate of the population effect size, especially for small samples (n < 20). It is often preferable to use the corrected effect size, or Hedges’g, instead: .. math:: g = d * (1 - \\frac{3}{4(n_1 + n_2) - 9}) If eftype = 'glass', the Glass :math:`\\delta` is reported, using the group with the lowest variance as the control group: .. math:: \\delta = \\frac{\\overline{X} - \\overline{Y}}{\\sigma_{control}} References ---------- .. [1] Lakens, D., 2013. Calculating and reporting effect sizes to facilitate cumulative science: a practical primer for t-tests and ANOVAs. Front. Psychol. 4, 863. https://doi.org/10.3389/fpsyg.2013.00863 .. [2] Cumming, Geoff. Understanding the new statistics: Effect sizes, confidence intervals, and meta-analysis. Routledge, 2013. Examples -------- 1. Compute Cohen d from two independent set of observations. >>> import numpy as np >>> from pingouin import compute_effsize >>> np.random.seed(123) >>> x = np.random.normal(2, size=100) >>> y = np.random.normal(2.3, size=95) >>> d = compute_effsize(x=x, y=y, eftype='cohen', paired=False) >>> print(d) -0.2835170152506578 2. Compute Hedges g from two paired set of observations. >>> import numpy as np >>> from pingouin import compute_effsize >>> x = [1.62, 2.21, 3.79, 1.66, 1.86, 1.87, 4.51, 4.49, 3.3 , 2.69] >>> y = [0.91, 3., 2.28, 0.49, 1.42, 3.65, -0.43, 1.57, 3.27, 1.13] >>> g = compute_effsize(x=x, y=y, eftype='hedges', paired=True) >>> print(g) 0.8370985097811404 3. Compute Glass delta from two independent set of observations. The group with the lowest variance will automatically be selected as the control. >>> import numpy as np >>> from pingouin import compute_effsize >>> np.random.seed(123) >>> x = np.random.normal(2, scale=1, size=50) >>> y = np.random.normal(2, scale=2, size=45) >>> d = compute_effsize(x=x, y=y, eftype='glass') >>> print(d) -0.1170721973604153
179
def members_entries(self, all_are_optional: bool=False) -> List[Tuple[str, str]]: rval = [] if self._members: for member in self._members: rval += member.members_entries(all_are_optional) elif self._choices: for choice in self._choices: rval += self._context.reference(choice).members_entries(True) else: return [] return rval
Return an ordered list of elements for the _members section :param all_are_optional: True means we're in a choice situation so everything is optional :return:
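A minimal, self-contained sketch of the recursion. The Leaf/Node stand-in classes below are hypothetical; the real _members/_choices/_context attributes come from the surrounding schema-compiler code:

from typing import List, Tuple

class Leaf:
    # terminal node yielding a single (name, type) entry
    def __init__(self, name: str, typ: str) -> None:
        self.name, self.typ = name, typ

    def members_entries(self, all_are_optional: bool = False) -> List[Tuple[str, str]]:
        typ = 'Optional[{}]'.format(self.typ) if all_are_optional else self.typ
        return [(self.name, typ)]

class Node:
    # non-terminal: either a sequence of members or a set of choices
    def __init__(self, members=None, choices=None) -> None:
        self._members = members or []
        self._choices = choices or []

    def members_entries(self, all_are_optional: bool = False) -> List[Tuple[str, str]]:
        rval = []
        if self._members:
            for member in self._members:
                rval += member.members_entries(all_are_optional)
        elif self._choices:
            # every branch of a choice is optional
            for choice in self._choices:
                rval += choice.members_entries(True)
        return rval

node = Node(choices=[Leaf('a', 'int'), Leaf('b', 'str')])
print(node.members_entries())   # [('a', 'Optional[int]'), ('b', 'Optional[str]')]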
180
def p_expression_And(self, p): p[0] = And(p[1], p[3], lineno=p.lineno(1)) p.set_lineno(0, p.lineno(1))
expression : expression AND expression
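As a sketch, here is how such a rule plugs into a minimal PLY grammar. The token set, the And node class, and the module-level (rather than method) layout are assumptions for illustration; the real parser defines many more rules:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NAME', 'AND')

t_AND = r'&&'
t_NAME = r'[A-Za-z_]\w*'
t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

# resolve the ambiguity of 'expression AND expression' as left-associative
precedence = (('left', 'AND'),)

class And:
    def __init__(self, left, right, lineno=None):
        self.left, self.right, self.lineno = left, right, lineno
    def __repr__(self):
        return 'And({!r}, {!r})'.format(self.left, self.right)

def p_expression_And(p):
    'expression : expression AND expression'
    p[0] = And(p[1], p[3], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))

def p_expression_name(p):
    'expression : NAME'
    p[0] = p[1]

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc()
print(parser.parse('a && b && c'))   # And(And('a', 'b'), 'c')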
181
def func_call_as_str(name, *args, **kwds):
    import itertools
    return '{0}({1})'.format(
        name,
        ', '.join(itertools.chain(
            map('{0!r}'.format, args),
            map('{0[0]}={0[1]!r}'.format, sorted(kwds.items())))))
Return arguments and keyword arguments as formatted string >>> func_call_as_str('f', 1, 2, a=1) 'f(1, 2, a=1)'
182
def copy(self, src, dst, other_system=None):
    copy_source = self.get_client_kwargs(src)
    copy_destination = self.get_client_kwargs(dst)
    with _handle_oss_error():
        bucket = self._get_bucket(copy_destination)
        bucket.copy_object(
            source_bucket_name=copy_source['bucket_name'],
            source_key=copy_source['key'],
            target_key=copy_destination['key'])
Copy object of the same storage. Args: src (str): Path or URL. dst (str): Path or URL. other_system (pycosio._core.io_system.SystemBase subclass): Unused.
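Under the hood this is a single oss2 server-side copy. A hedged sketch of the equivalent direct call; the endpoint, credentials, and bucket/key names are placeholders:

import oss2

auth = oss2.Auth('<access-key-id>', '<access-key-secret>')
bucket = oss2.Bucket(auth, 'https://oss-cn-hangzhou.aliyuncs.com', 'dst-bucket')

# server-side copy: no object data passes through the client
bucket.copy_object(source_bucket_name='src-bucket',
                   source_key='path/in/src.bin',
                   target_key='path/in/dst.bin')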
183
def to_int(self, number, default=0):
    try:
        return int(number)
    except (TypeError, ValueError):
        # int() raises TypeError/ValueError (never KeyError, which the
        # original caught); fall back to the default, and finally to 0
        return self.to_int(default, 0)
Coerce ``number`` to an integer, falling back to ``default`` (and finally to 0) when the conversion fails.
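The same fallback logic as a standalone function, for a quick check of the behaviour:

def to_int(number, default=0):
    try:
        return int(number)
    except (TypeError, ValueError):
        return to_int(default, 0)   # fall back to default, then to 0

print(to_int('42'))          # 42
print(to_int('oops', 7))     # 7
print(to_int(None, 'bad'))   # 0 -- both conversions fail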
184
def elect(self, candidate_aggregates, candidate_id): assert(candidate_id not in self.candidates_elected) elected_no = len(self.candidates_elected) + 1 self.candidates_elected[candidate_id] = True transfer_value = 0 excess_votes = paper_count = None if len(self.candidates_elected) != self.vacancies: excess_votes = max(candidate_aggregates.get_vote_count(candidate_id) - self.quota, 0) assert(excess_votes >= 0) paper_count = self.candidate_bundle_transactions.get_paper_count(candidate_id) if paper_count > 0: transfer_value = fractions.Fraction(excess_votes, paper_count) assert(transfer_value >= 0) self.election_distributions_pending.append((candidate_id, transfer_value, excess_votes)) self.results.candidate_elected( CandidateElected( candidate_id=candidate_id, order=elected_no, excess_votes=excess_votes, paper_count=paper_count, transfer_value=transfer_value))
Elect a candidate, updating internal state to track this. Calculate the paper count to be transferred on to other candidates and, if required, schedule a distribution of papers.
185
def LoadExclusions(self, snps):
    if len(snps) == 1 and os.path.isfile(snps[0]):
        # single argument naming a file: read rsids from it
        with open(snps[0]) as f:
            snp_names = f.read().strip().split()
    else:
        snp_names = snps
    for snp in snp_names:
        if snp.strip():
            self.ignored_rs.append(snp)
Load locus exclusions. :param snps: Can either be a list of rsids or a file containing rsids. :return: None If snps is a file, the file must only contain RSIDs separated by whitespace (tabs, spaces and return characters).
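A self-contained sketch of the two accepted call styles, using a hypothetical stand-in class with the same logic:

import os
import tempfile

class LocusFilter:
    def __init__(self):
        self.ignored_rs = []

    def load_exclusions(self, snps):
        if len(snps) == 1 and os.path.isfile(snps[0]):
            with open(snps[0]) as f:
                snp_names = f.read().strip().split()
        else:
            snp_names = snps
        for snp in snp_names:
            if snp.strip():
                self.ignored_rs.append(snp)

f1 = LocusFilter()
f1.load_exclusions(['rs123', 'rs456'])   # direct list of rsids
print(f1.ignored_rs)                     # ['rs123', 'rs456']

with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as tmp:
    tmp.write('rs1 rs2\nrs3\n')
f2 = LocusFilter()
f2.load_exclusions([tmp.name])           # single-element list -> file of rsids
print(f2.ignored_rs)                     # ['rs1', 'rs2', 'rs3']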
186
def _prepare_calls(result_file, out_dir, data):
    sample = dd.get_sample_name(data)
    out_file = os.path.join(out_dir, "%s-optitype.csv" % (sample))
    if not utils.file_uptodate(out_file, result_file):
        hla_truth = bwakit.get_hla_truthset(data)
        with file_transaction(data, out_file) as tx_out_file:
            with open(tx_out_file, "w") as out_handle:
                writer = csv.writer(out_handle)
                allele_info = _parse_result_file(result_file)
                if len(allele_info) == 1:
                    writer.writerow(["sample", "locus", "alleles", "expected",
                                     "validates"])
                else:
                    writer.writerow(["sample", "locus", "index", "alleles",
                                     "score"])
                for j, (alleles, score) in enumerate(allele_info):
                    for hla_locus, call_alleles in alleles:
                        truth_alleles = tz.get_in([sample, hla_locus], hla_truth, [])
                        if len(allele_info) == 1:
                            writer.writerow([sample, hla_locus,
                                             ";".join(call_alleles),
                                             ";".join(truth_alleles),
                                             bwakit.matches_truth(call_alleles,
                                                                  truth_alleles, data)])
                        else:
                            writer.writerow([sample, hla_locus, j,
                                             ";".join(call_alleles), score])
    return out_file
Write summary file of results of HLA typing by allele.
187
def from_dict(cls, operation, client, **caller_metadata): operation_pb = json_format.ParseDict(operation, operations_pb2.Operation()) result = cls(operation_pb.name, client, **caller_metadata) result._update_state(operation_pb) result._from_grpc = False return result
Factory: construct an instance from a dictionary. :type operation: dict :param operation: Operation as a JSON object. :type client: :class:`~google.cloud.client.Client` :param client: The client used to poll for the status of the operation. :type caller_metadata: dict :param caller_metadata: caller-assigned metadata about the operation :rtype: :class:`Operation` :returns: new instance, with attributes based on the protobuf.
188
def _query_entities(self, table_name, filter=None, select=None, max_results=None,
                    marker=None, accept=TablePayloadFormat.JSON_MINIMAL_METADATA,
                    property_resolver=None, timeout=None, _context=None):
    _validate_not_none('table_name', table_name)
    _validate_not_none('accept', accept)
    next_partition_key = None if marker is None else marker.get('nextpartitionkey')
    next_row_key = None if marker is None else marker.get('nextrowkey')

    request = HTTPRequest()
    request.method = 'GET'
    request.host_locations = self._get_host_locations(secondary=True)
    request.path = '/' + _to_str(table_name) + '()'
    request.headers = {'Accept': _to_str(accept)}
    request.query = {
        '$filter': _to_str(filter),
        '$select': _to_str(select),
        '$top': _int_to_str(max_results),
        'NextPartitionKey': _to_str(next_partition_key),
        'NextRowKey': _to_str(next_row_key),
        'timeout': _int_to_str(timeout),
    }

    return self._perform_request(request, _convert_json_response_to_entities,
                                 [property_resolver, self.require_encryption,
                                  self.key_encryption_key,
                                  self.key_resolver_function],
                                 operation_context=_context)
Returns a list of entities under the specified table. Makes a single list request to the service. Used internally by the query_entities method. :param str table_name: The name of the table to query. :param str filter: Returns only entities that satisfy the specified filter. Note that no more than 15 discrete comparisons are permitted within a $filter string. See http://msdn.microsoft.com/en-us/library/windowsazure/dd894031.aspx for more information on constructing filters. :param str select: Returns only the desired properties of an entity from the set. :param int max_results: The maximum number of entities to return. :param obj marker: A dictionary which identifies the portion of the query to be returned with the next query operation. The operation returns a next_marker element within the response body if the list returned was not complete. This value may then be used as a query parameter in a subsequent call to request the next portion of the list of table. The marker value is opaque to the client. :param str accept: Specifies the accepted content type of the response payload. See :class:`~azure.storage.table.models.TablePayloadFormat` for possible values. :param property_resolver: A function which given the partition key, row key, property name, property value, and the property EdmType if returned by the service, returns the EdmType of the property. Generally used if accept is set to JSON_NO_METADATA. :type property_resolver: func(pk, rk, prop_name, prop_value, service_edm_type) :param int timeout: The server timeout, expressed in seconds. :return: A list of entities, potentially with a next_marker property. :rtype: list(:class:`~azure.storage.table.models.Entity`)
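Callers normally reach this through the public query_entities wrapper rather than the internal method. A hedged sketch; the account credentials are placeholders and the table must already exist:

from azure.storage.table import TableService

table_service = TableService(account_name='<account>', account_key='<key>')
entities = table_service.query_entities(
    'mytable',
    filter="PartitionKey eq 'p1'",
    select='RowKey,Price',
    num_results=10)
for entity in entities:
    print(entity.RowKey)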
189
def pretty_str(self, indent=0):
    indent = ' ' * indent
    if self.value is not None:
        # render as '<indent><name> = <value>'
        return '{}{} = {}'.format(indent, self.name, pretty_str(self.value))
    return indent + self.name
Return a human-readable string representation of this object.

Kwargs:
    indent (int): The number of spaces to use as indentation.
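A toy stand-in (hypothetical NamedValue class plus a trivial module-level pretty_str helper) showing the expected output shape:

def pretty_str(value, indent=0):
    # stand-in for the real recursive helper
    return repr(value)

class NamedValue:
    def __init__(self, name, value=None):
        self.name, self.value = name, value

    def pretty_str(self, indent=0):
        pad = ' ' * indent
        if self.value is not None:
            return '{}{} = {}'.format(pad, self.name, pretty_str(self.value))
        return pad + self.name

print(NamedValue('x', 42).pretty_str(indent=4))   # '    x = 42'
print(NamedValue('y').pretty_str())               # 'y'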
190
def cmd_host(verbose):
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    print("Gather information about the host...", file=sys.stderr)

    result = gather_details()

    if result:
        print(json.dumps(result, indent=4))
    else:
        print("[X] Unable to gather information")

    return True
Collect information about the host where habu is running. Example: \b $ habu.host { "kernel": [ "Linux", "demo123", "5.0.6-200.fc29.x86_64", "#1 SMP Wed Apr 3 15:09:51 UTC 2019", "x86_64", "x86_64" ], "distribution": [ "Fedora", "29", "Twenty Nine" ], "libc": [ "glibc", "2.2.5" ], "arch": "x86_64", "python_version": "3.7.3", "os_name": "Linux", "cpu": "x86_64", "static_hostname": "demo123", "fqdn": "demo123.lab.sierra" }
191
def current_rev_reg_id(base_dir: str, cd_id: str) -> str:
    tags = [int(rev_reg_id2tag(basename(f)))
            for f in Tails.links(base_dir) if cd_id in basename(f)]
    if not tags:
        # error message reconstructed; the original literal was lost
        raise AbsentTails('No tails files present for cred def id {}'.format(cd_id))

    return rev_reg_id(cd_id, str(max(tags)))
Return the current revocation registry identifier for input credential definition identifier, in input directory. Raise AbsentTails if no corresponding tails file, signifying no such revocation registry defined. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: identifier for current revocation registry on input credential definition identifier
192
def merge_configs(config: Dict[str, Any],
                  default_config: Dict[str, Any]) -> Dict[str, Any]:
    for key in default_config:
        if key in config:
            if isinstance(config[key], dict) and isinstance(default_config[key], dict):
                # both sides are dicts: merge defaults into the nested config
                merge_configs(config[key], default_config[key])
        else:
            # key missing from config: take the default value
            config[key] = default_config[key]
    return config
Merges a `default` config with DAG config. Used to set default values for a group of DAGs. :param config: config to merge in default values :type config: Dict[str, Any] :param default_config: config to merge default values from :type default_config: Dict[str, Any] :returns: dict with merged configs :type: Dict[str, Any]
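A quick check of the merge semantics, assuming the corrected merge_configs above is in scope: existing keys win, defaults only fill gaps, and nested dicts merge recursively.

default = {'retries': 3, 'owner': 'data-eng',
           'alerts': {'email': 'ops@example.com', 'on_failure': True}}
config = {'retries': 5, 'alerts': {'email': 'me@example.com'}}

print(merge_configs(config, default))
# {'retries': 5, 'alerts': {'email': 'me@example.com', 'on_failure': True},
#  'owner': 'data-eng'}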
193
def userpass(self, dir="ppcoin"):
    source = os.path.expanduser("~/.{0}/{0}.conf".format(dir))
    username = password = None
    with open(source, "r") as conf:
        for line in conf:
            if line.startswith("rpcuser"):
                username = line.split("=")[1].strip()
            if line.startswith("rpcpassword"):
                password = line.split("=")[1].strip()
    return username, password
Reads config file for username/password
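The parsing itself is easy to exercise against an in-memory config, with io.StringIO standing in for the real ~/.ppcoin/ppcoin.conf file:

import io

conf = io.StringIO('rpcuser=alice\nrpcpassword=s3cret\nrpcport=9902\n')
username = password = None
for line in conf:
    if line.startswith('rpcuser'):
        username = line.split('=')[1].strip()
    if line.startswith('rpcpassword'):
        password = line.split('=')[1].strip()
print(username, password)   # alice s3cret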
194
def get(self, mail):
    users = (v for v in self.list() if v.get('mail') == mail)
    for i in users:
        self.log.debug(i)
        return i
    return None
Get one user from LinShare, looked up by mail address; returns None when no user matches.
195
def t_NATIVEPHP(t):
    r'<\?php((?!<\?php)[\s\S])*\?>[ \t]*(?=\n)'
    lineNoInc(t)
    t.value = t.value[6:].lstrip()      # drop the leading '<?php ' prefix
    pos2 = t.value.rfind('?>')          # locate the trailing '?>'
    t.value = t.value[0:pos2].rstrip()
    return t
r'<\?php((?!<\?php)[\s\S])*\?>[ \t]*(?=\n)
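Since the token's pattern is an ordinary regex, it can be exercised standalone with re to see exactly what the rule captures and how the body trims it:

import re

PATTERN = r'<\?php((?!<\?php)[\s\S])*\?>[ \t]*(?=\n)'
source = 'before\n<?php echo "hi"; ?>  \nafter\n'
m = re.search(PATTERN, source)
print(repr(m.group(0)))   # '<?php echo "hi"; ?>  ' -- trailing blanks kept, newline excluded

# mimic the token body's trimming
value = m.group(0)[6:].lstrip()
value = value[:value.rfind('?>')].rstrip()
print(value)              # echo "hi";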
196
def cos_zen(utc_time, lon, lat): lon = np.deg2rad(lon) lat = np.deg2rad(lat) r_a, dec = sun_ra_dec(utc_time) h__ = _local_hour_angle(utc_time, lon, r_a) return (np.sin(lat) * np.sin(dec) + np.cos(lat) * np.cos(dec) * np.cos(h__))
Cosine of the sun-zenith angle for *lon*, *lat* at *utc_time*. utc_time: datetime.datetime instance of the UTC time lon and lat in degrees.
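A small usage sketch, assuming this is the cos_zen exposed by pyorbital.astronomy:

from datetime import datetime
from pyorbital.astronomy import cos_zen

# Solar noon (UTC) at lon=0, lat=0 near the June solstice: the sun stands
# over the Tropic of Cancer (~23.4 deg N), so the result should be close
# to cos(23.4 deg) ~= 0.92.
print(cos_zen(datetime(2021, 6, 21, 12, 0), 0.0, 0.0))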
197
def __collect_interfaces_return(interfaces):
    acc = []
    for (interfaceName, interfaceData) in interfaces.items():
        signalValues = interfaceData.get("signals", {})
        for (signalName, signalValue) in signalValues.items():
            pinName = "{0}.{1}".format(interfaceName, signalName)
            # dict key names assumed ('name'/'value'); the original
            # string literals were lost from this snippet
            acc.append({'name': pinName, 'value': signalValue})
    return acc
Collect new-style (44.1+) return values into an old-style key/value list.
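Given the key names assumed above, a quick check of the flattening (ordering follows dict insertion order):

interfaces = {
    'uart0': {'signals': {'tx': 1, 'rx': 0}},
    'gpio': {'signals': {'led': 1}},
}
print(__collect_interfaces_return(interfaces))
# [{'name': 'uart0.tx', 'value': 1}, {'name': 'uart0.rx', 'value': 0},
#  {'name': 'gpio.led', 'value': 1}]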
198
def find_satisfied_condition(conditions, ps): assert is_iterable_typed(conditions, property_set.PropertySet) assert isinstance(ps, property_set.PropertySet) for condition in conditions: found_all = True for i in condition.all(): if i.value: found = i.value in ps.get(i.feature) else: found = not ps.get(i.feature) found_all = found_all and found if found_all: return condition return None
Returns the first element of ``conditions`` that is satisfied by (is a subset of) ``ps``, or None if no such element exists.
199
def update_model_cache(table_name): model_cache_info = ModelCacheInfo(table_name, uuid.uuid4().hex) model_cache_backend.share_model_cache_info(model_cache_info)
Updates model cache by generating a new key for the model