Column        Type      Stats
Unnamed: 0    int64     0 .. 10k
function      string    lengths 79 .. 138k
label         string    20 classes
info          string    lengths 42 .. 261
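The rows that follow repeat the four columns in order: the row index, the function source with a __HOLE__ placeholder where an exception class was masked out, the label naming that class, and the dataset path of the original function. As a minimal sketch of how such rows might be consumed (the CSV filename and the assumption of a CSV export are hypothetical, not part of this dump):

import csv

# Hypothetical CSV export of the table below; adjust the path to your copy.
with open('ethpy150_sample.csv', newline='') as f:
    for row in csv.DictReader(f):
        masked_fn = row['function']   # source containing the __HOLE__ mask
        label = row['label']          # exception class that fills the mask
        restored = masked_fn.replace('__HOLE__', label)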
8,400
def _read_config_callback(self, data):
    """Callback function when the ZKConfigManager reads new config data.

    Args:
        data: A string, the new data in the config file.

    """
    # In case of corrupted data.
    try:
        decoded_data = json.loads(data)
        if type(decoded_data) is dict:
            valid_data = {}
            # After dumping a dict to json and then loading it back,
            # all keys in the original dict will be converted to str
            # type, regardless of what original types they have. We shall
            # convert keys back to a certain type if it is specified.
            try:
                for k, v in decoded_data.items():
                    if self.key_type is not None:
                        k = self.key_type(k)
                    valid_data[k] = v
            except __HOLE__:
                log.error("Loaded dict contains key(s) that are not able to be converted to the original type.")
                valid_data = {}
            self._dict = valid_data
        else:
            log.error("Loaded data: bad format, expecting a dict")
            self._dict = {}
    except Exception:
        log.error("Unable to load data, exception encountered")
        self._dict = {}
ValueError
dataset/ETHPy150Open pinterest/kingpin/kingpin/manageddata/managed_datastructures.py/ManagedHashMap._read_config_callback
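For illustration, a record is un-masked by substituting its label back into the placeholder; with the first record's own label (ValueError), the handler line reads "except ValueError:" again. A one-line sketch of the substitution:

masked = 'except __HOLE__:'
print(masked.replace('__HOLE__', 'ValueError'))  # -> except ValueError: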
8,401
def _reload_config_data(self):
    """Reload the data from config file into ``self._dict``

    Note: When changing the managed list using add() and remove() from
    command line, the DataWatcher's greenlet does not work, you need to
    call this explicitly to update the list so as to make following
    changes.

    """
    try:
        self.zk_config_manager.reload_config_data()
    except __HOLE__:
        log.info('Error reading config file in managed map %s:%s' % (
            self.list_domain, self.list_key))
        # Assume there is empty data in the config file.
        self._read_config_callback('')
IOError
dataset/ETHPy150Open pinterest/kingpin/kingpin/manageddata/managed_datastructures.py/ManagedHashMap._reload_config_data
8,402
def _reload_config_data(self):
    """Reload the data from config file into ``self._dict``

    Note: When changing the managed mapped list using add() and remove()
    from command line, the DataWatcher's greenlet does not work, you need
    to call this explicitly to update the list so as to make following
    changes.

    """
    try:
        self.zk_config_manager.reload_config_data()
    except __HOLE__:
        log.info('Error reading config file in managed mapped list %s:%s' % (
            self.list_domain, self.list_key))
        # Assume there is empty data in the config file.
        self._read_config_callback('{}')
IOError
dataset/ETHPy150Open pinterest/kingpin/kingpin/manageddata/managed_datastructures.py/ManagedMappedList._reload_config_data
8,403
def _reload_config_data(self):
    """Reload the data from config file into 'self._json_config'

    Note: When changing the managed json config using set_json_config()
    from command line, the DataWatcher's greenlet does not work, you need
    to call this explicitly to update the config so as to make following
    changes.

    """
    try:
        self.zk_config_manager.reload_config_data()
    except __HOLE__:
        log.info('Error reading config file in managed json config %s:%s' % (
            self.list_domain, self.list_key))
        # Assume there is empty data in the config file.
        self._read_config_callback('')
IOError
dataset/ETHPy150Open pinterest/kingpin/kingpin/manageddata/managed_datastructures.py/ManagedJsonConfig._reload_config_data
8,404
def set_data(self, new_data):
    """Serialize and persist new data to ZK.

    Args:
        new_data: The new json config

    Returns:
        True if update succeeds, False otherwise

    """
    try:
        old_data = self.get_data()
        serialized_data = json.dumps(
            old_data, cls=self.encoder_cls, sort_keys=True) if old_data else ''
        serialized_new_data = json.dumps(
            new_data, cls=self.encoder_cls, sort_keys=True) if new_data else ''
    except __HOLE__:
        log.error("Error JSON-serializing data for managed data config")
        log.error(self.get_data())
        log.error(new_data)
        return False
    return self.zk_config_manager.update_zk(
        serialized_data, serialized_new_data, self.force_config_update)
TypeError
dataset/ETHPy150Open pinterest/kingpin/kingpin/manageddata/managed_datastructures.py/ManagedJsonSerializableDataConfig.set_data
8,405
def _reload_config_data(self):
    """Reload (and deserialize) data from the config file into 'self._data'.

    Note: When changing the config using self.set_data() from the command
    line, the DataWatcher's greenlet does not work, so you need to call
    this method explicitly to update the config.
    (Note copied from ManagedJsonConfig._reload_config_data).

    """
    try:
        self.zk_config_manager.reload_config_data()
    except __HOLE__:
        log.info('Error reading config file in managed json config %s:%s' % (
            self.list_domain, self.list_key))
        # Assume there is empty data in the config file.
        self._read_config_callback('')
IOError
dataset/ETHPy150Open pinterest/kingpin/kingpin/manageddata/managed_datastructures.py/ManagedJsonSerializableDataConfig._reload_config_data
8,406
def load_input(filename):
    try:
        with open(filename) as f:
            intermediate_code = f.read()
    except (OSError, __HOLE__) as e:
        print("something's wrong with %s" % filename)
        exit(1)
    return intermediate_code
IOError
dataset/ETHPy150Open alehander42/pseudo/pseudo/loader.py/load_input
8,407
def mux(seed_pool, n_samples, k, lam=256.0, pool_weights=None,
        with_replacement=True, prune_empty_seeds=True, revive=False):
    '''Stochastic multiplexor for generator seeds.

    Given an array of Streamer objects, do the following:

    1. Select ``k`` seeds at random to activate
    2. Assign each activated seed a sample count ~ Poisson(lam)
    3. Yield samples from the streams by randomly multiplexing
       from the active set.
    4. When a stream is exhausted, select a new one from the pool.

    Parameters
    ----------
    seed_pool : iterable of Streamer
        The collection of Streamer objects

    n_samples : int > 0 or None
        The number of samples to generate.
        If ``None``, sample indefinitely.

    k : int > 0
        The number of streams to keep active at any time.

    lam : float > 0 or None
        Rate parameter for the Poisson distribution governing sample
        counts for individual streams.
        If ``None``, sample infinitely from each stream.

    pool_weights : np.ndarray or None
        Optional weighting for ``seed_pool``.
        If ``None``, then weights are assumed to be uniform.
        Otherwise, ``pool_weights[i]`` defines the sampling proportion
        of ``seed_pool[i]``.
        Must have the same length as ``seed_pool``.

    with_replacement : bool
        Sample Streamers with replacement.  This allows a single stream
        to be used multiple times (even simultaneously).
        If ``False``, then each Streamer is consumed at most once and
        never revisited.

    prune_empty_seeds : bool
        Disable seeds from the pool that produced no data.
        If ``True``, Streamers that previously produced no data are never
        revisited.
        Note that this may be undesirable for streams where past emptiness
        may not imply future emptiness.

    revive : bool
        If ``with_replacement`` is ``False``, setting ``revive=True`` will
        re-insert previously exhausted seeds into the candidate set.
        This configuration allows a seed to be active at most once at any
        time.
    '''
    n_seeds = len(seed_pool)

    if not n_seeds:
        raise RuntimeError('Cannot mux an empty seed-pool')

    # Set up the sampling distribution over streams
    seed_distribution = 1. / n_seeds * np.ones(n_seeds)

    if pool_weights is None:
        pool_weights = seed_distribution.copy()

    pool_weights = np.atleast_1d(pool_weights)

    assert len(pool_weights) == len(seed_pool)
    assert (pool_weights > 0.0).any()
    pool_weights /= np.sum(pool_weights)

    # Instantiate the pool
    streams = [None] * k

    stream_weights = np.zeros(k)
    stream_counts = np.zeros(k, dtype=int)
    stream_idxs = np.zeros(k, dtype=int)

    for idx in range(k):
        if not (seed_distribution > 0).any():
            break

        stream_idxs[idx] = np.random.choice(n_seeds, p=seed_distribution)
        streams[idx], stream_weights[idx] = generate_new_seed(
            stream_idxs[idx], seed_pool, pool_weights,
            seed_distribution, lam, with_replacement)

    weight_norm = np.sum(stream_weights)

    # Main sampling loop
    n = 0

    if n_samples is None:
        n_samples = np.inf

    while n < n_samples and weight_norm > 0.0:
        # Pick a stream from the active set
        idx = np.random.choice(k, p=stream_weights / weight_norm)

        # Can we sample from it?
        try:
            # Then yield the sample
            yield six.advance_iterator(streams[idx])

            # Increment the sample counter
            n += 1
            stream_counts[idx] += 1

        except __HOLE__:
            # Oops, this one's exhausted.
            # If we're disabling empty seeds, see if this stream produced data
            if prune_empty_seeds and stream_counts[idx] == 0:
                seed_distribution[stream_idxs[idx]] = 0.0

            if revive and not with_replacement:
                # If we need to revive a seed, give it the max current
                # probability
                if seed_distribution.any():
                    seed_distribution[stream_idxs[idx]] = np.max(seed_distribution)
                else:
                    seed_distribution[stream_idxs[idx]] = 1.0

            if (seed_distribution > 0).any():
                # Replace it and move on if there are still seeds in the pool.
                seed_distribution[:] /= np.sum(seed_distribution)

                stream_idxs[idx] = np.random.choice(n_seeds, p=seed_distribution)

                streams[idx], stream_weights[idx] = generate_new_seed(
                    stream_idxs[idx], seed_pool, pool_weights,
                    seed_distribution, lam, with_replacement)

                stream_counts[idx] = 0
            else:
                # Otherwise, this one's exhausted.  Set its probability to 0
                stream_weights[idx] = 0.0

            weight_norm = np.sum(stream_weights)
StopIteration
dataset/ETHPy150Open bmcfee/pescador/pescador/util.py/mux
8,408
def get_format_modules(reverse=False, locale=None):
    """
    Returns an iterator over the format modules found in the project
    and Django.
    """
    modules = []
    if not locale or not check_for_language(get_language()) \
            or not settings.USE_L10N:
        return modules
    if not locale:
        locale = get_language()
    locale = to_locale(locale)

    if settings.FORMAT_MODULE_PATH:
        format_locations = [settings.FORMAT_MODULE_PATH + '.%s']
    else:
        format_locations = []
    format_locations.append('django.conf.locale.%s')

    for location in format_locations:
        for l in (locale, locale.split('_')[0]):
            try:
                mod = import_module('.formats', location % l)
            except __HOLE__:
                pass
            else:
                # Don't return duplicates
                if mod not in modules:
                    modules.append(mod)
    if reverse:
        modules.reverse()
    return modules
ImportError
dataset/ETHPy150Open willhardy/Roll-Your-Own/rollyourown/commerce/utils/formatting.py/get_format_modules
8,409
def get_format(format_type, locale=None):
    """
    For a specific format type, returns the format for the current
    language (locale), defaults to the format in the settings.
    format_type is the name of the format, e.g. 'DATE_FORMAT'
    """
    format_type = smart_str(format_type)
    if settings.USE_L10N:
        for module in get_format_modules(locale=locale):
            try:
                return getattr(module, format_type)
            except __HOLE__:
                pass
    return getattr(settings, format_type)
AttributeError
dataset/ETHPy150Open willhardy/Roll-Your-Own/rollyourown/commerce/utils/formatting.py/get_format
8,410
def feed(request, url, feed_dict=None):
    """Provided for backwards compatibility."""
    if not feed_dict:
        raise Http404(_(u"No feeds are registered."))

    try:
        slug, param = url.split('/', 1)
    except __HOLE__:
        slug, param = url, ''

    try:
        f = feed_dict[slug]
    except KeyError:
        raise Http404(_(u"Slug %r isn't registered.") % slug)

    instance = f()
    instance.feed_url = getattr(f, 'feed_url', None) or request.path
    instance.title_template = f.title_template or ('feeds/%s_title.html' % slug)
    instance.description_template = f.description_template or ('feeds/%s_description.html' % slug)
    return instance(request)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/contrib/gis/views.py/feed
8,411
def _copy_data(instream, outstream):
    # Copy one stream to another
    sent = 0
    if hasattr(sys.stdin, 'encoding'):
        enc = sys.stdin.encoding
    else:
        enc = 'ascii'
    while True:
        data = instream.read(1024)
        if not data:
            break
        sent += len(data)
        logger.debug("sending chunk (%d): %r", sent, data[:256])
        try:
            outstream.write(data)
        except UnicodeError:
            outstream.write(data.encode(enc))
        except:
            # Can sometimes get 'broken pipe' errors even when the data has
            # all been sent
            logger.exception('Error sending data')
            break
    try:
        outstream.close()
    except __HOLE__:
        logger.warning('Exception occurred while closing: ignored', exc_info=1)
    logger.debug("closed output, %d bytes sent", sent)
IOError
dataset/ETHPy150Open buanzo/jiffy/gnupg.py/_copy_data
8,412
def _make_binary_stream(s, encoding):
    if _py3k:
        if isinstance(s, str):
            s = s.encode(encoding)
    else:
        if type(s) is not str:
            s = s.encode(encoding)
    try:
        from io import BytesIO
        rv = BytesIO(s)
    except __HOLE__:
        rv = StringIO(s)
    return rv
ImportError
dataset/ETHPy150Open buanzo/jiffy/gnupg.py/_make_binary_stream
8,413
def _collect_output(self, process, result, writer=None, stdin=None):
    """
    Drain the subprocess's output streams, writing the collected output
    to the result. If a writer thread (writing to the subprocess) is
    given, make sure it's joined before returning. If a stdin stream is
    given, close it before returning.
    """
    stderr = codecs.getreader(self.encoding)(process.stderr)
    rr = threading.Thread(target=self._read_response, args=(stderr, result))
    rr.setDaemon(True)
    logger.debug('stderr reader: %r', rr)
    rr.start()

    stdout = process.stdout
    dr = threading.Thread(target=self._read_data, args=(stdout, result))
    dr.setDaemon(True)
    logger.debug('stdout reader: %r', dr)
    dr.start()

    dr.join()
    rr.join()
    if writer is not None:
        writer.join()
    process.wait()
    if stdin is not None:
        try:
            stdin.close()
        except __HOLE__:
            pass
    stderr.close()
    stdout.close()
IOError
dataset/ETHPy150Open buanzo/jiffy/gnupg.py/GPG._collect_output
8,414
def sign_file(self, file, keyid=None, passphrase=None, clearsign=True,
              detach=False, binary=False):
    """sign file"""
    logger.debug("sign_file: %s", file)
    if binary:
        args = ['-s']
    else:
        args = ['-sa']
    # You can't specify detach-sign and clearsign together: gpg ignores
    # the detach-sign in that case.
    if detach:
        args.append("--detach-sign")
    elif clearsign:
        args.append("--clearsign")
    if keyid:
        args.append('--default-key %s' % shell_quote(keyid))
    result = self.result_map['sign'](self)
    # We could use _handle_io here except for the fact that if the
    # passphrase is bad, gpg bails and you can't write the message.
    p = self._open_subprocess(args, passphrase is not None)
    try:
        stdin = p.stdin
        if passphrase:
            _write_passphrase(stdin, passphrase, self.encoding)
        writer = _threaded_copy_data(file, stdin)
    except __HOLE__:
        logging.exception("error writing message")
        writer = None
    self._collect_output(p, result, writer, stdin)
    return result
IOError
dataset/ETHPy150Open buanzo/jiffy/gnupg.py/GPG.sign_file
8,415
def gen_key_input(self, **kwargs):
    """
    Generate --gen-key input per gpg doc/DETAILS
    """
    parms = {}
    for key, val in list(kwargs.items()):
        key = key.replace('_', '-').title()
        if str(val).strip():  # skip empty strings
            parms[key] = val
    parms.setdefault('Key-Type', 'RSA')
    parms.setdefault('Key-Length', 2048)
    parms.setdefault('Name-Real', "Autogenerated Key")
    try:
        logname = os.environ['LOGNAME']
    except __HOLE__:
        logname = os.environ['USERNAME']
    hostname = socket.gethostname()
    parms.setdefault('Name-Email', "%s@%s" % (logname.replace(' ', '_'),
                                              hostname))
    out = "Key-Type: %s\n" % parms.pop('Key-Type')
    for key, val in list(parms.items()):
        out += "%s: %s\n" % (key, val)
    out += "%commit\n"
    return out

    # Key-Type: RSA
    # Key-Length: 1024
    # Name-Real: ISdlink Server on %s
    # Name-Comment: Created by %s
    # Name-Email: isdlink@%s
    # Expire-Date: 0
    # %commit
    #
    #
    # Key-Type: DSA
    # Key-Length: 1024
    # Subkey-Type: ELG-E
    # Subkey-Length: 1024
    # Name-Real: Joe Tester
    # Name-Comment: with stupid passphrase
    # Name-Email: joe@foo.bar
    # Expire-Date: 0
    # Passphrase: abc
    # %pubring foo.pub
    # %secring foo.sec
    # %commit

#
# ENCRYPTION
#
KeyError
dataset/ETHPy150Open buanzo/jiffy/gnupg.py/GPG.gen_key_input
8,416
def get_value(self):
    if self.static:
        val = self.default
    else:
        try:
            val = getattr(self, 'db_value')
        except __HOLE__:
            val = self.default
    return self.field.to_python(val)
AttributeError
dataset/ETHPy150Open idlesign/django-siteprefs/siteprefs/utils.py/PrefProxy.get_value
8,417
def get_pref_model_class(app, prefs, get_prefs_func):
    """Returns preferences model class dynamically created for a given app
    or None on conflict."""

    model_dict = {
        '_prefs_app': app,
        '_get_prefs': staticmethod(get_prefs_func),
        '__module__': '%s.%s' % (app, PREFS_MODULE_NAME),
        'Meta': type('Meta', (models.options.Options,), {
            'verbose_name': _('Preference'),
            'verbose_name_plural': _('Preferences')
        })
    }

    for field_name, val_proxy in prefs.items():
        model_dict[field_name] = val_proxy.field

    try:  # Make Django 1.7 happy.
        model = type('Preferences', (models.Model,), model_dict)
    except __HOLE__:
        return None

    def fake_save_base(self, *args, **kwargs):
        updated_prefs = {
            f.name: getattr(self, f.name)
            for f in self._meta.fields
            if not isinstance(f, models.fields.AutoField)
        }

        app_prefs = self._get_prefs(self._prefs_app)
        for pref in app_prefs.keys():
            if pref in updated_prefs:
                app_prefs[pref].db_value = updated_prefs[pref]

        self.pk = self._prefs_app  # Make Django 1.7 happy.
        prefs_save.send(sender=self, app=self._prefs_app,
                        updated_prefs=updated_prefs)
        return True

    model.save_base = fake_save_base

    return model
RuntimeError
dataset/ETHPy150Open idlesign/django-siteprefs/siteprefs/utils.py/get_pref_model_class
8,418
def test_signed_request_missing_page_data():
    try:
        SignedRequest(TEST_SIGNED_REQUEST_MISSING_PAGE_DATA,
                      TEST_FACEBOOK_APPLICATION_SECRET_KEY)
    except __HOLE__:
        raise AssertionError('Missing page data in signed request')
KeyError
dataset/ETHPy150Open jgorset/facepy/tests/test_signed_request.py/test_signed_request_missing_page_data
8,419
@app.route('/pypi/check_update/<dist_name>')
def check_pypi_update(dist_name):
    """ Just check for updates and return a json with the attribute
    "has_update".

    :param dist_name: distribution name
    :rtype: json
    :return: json with the attribute "has_update"
    """
    pkg_res = get_pkg_res()
    pkg_dist_version = pkg_res.get_distribution(dist_name).version
    pypi_rel = get_pypi_releases(dist_name)

    if pypi_rel:
        pypi_last_version = pkg_res.parse_version(pypi_rel[0])
        current_version = pkg_res.parse_version(pkg_dist_version)

        if pypi_last_version > current_version:
            DIST_PYPI_CACHE.add(dist_name.lower())
            return jsonify({"has_update": 1})

    try:
        DIST_PYPI_CACHE.remove(dist_name.lower())
    except __HOLE__:
        pass

    return jsonify({"has_update": 0})
KeyError
dataset/ETHPy150Open perone/stallion/stallion/main.py/check_pypi_update
8,420
@app.route('/pypi/releases/<dist_name>')
def releases(dist_name):
    """ This is the /pypi/releases/<dist_name> entry point, it is the
    interface between Stallion and the PyPI RPC service when checking
    for updates.

    :param dist_name: the package name (distribution name).
    """
    pkg_res = get_pkg_res()
    data = {}
    pkg_dist_version = pkg_res.get_distribution(dist_name).version
    pypi_rel = get_pypi_releases(dist_name)

    data["dist_name"] = dist_name
    data["pypi_info"] = pypi_rel
    data["current_version"] = pkg_dist_version

    if pypi_rel:
        pypi_last_version = pkg_res.parse_version(pypi_rel[0])
        current_version = pkg_res.parse_version(pkg_dist_version)
        last_version = pkg_dist_version.lower() != pypi_rel[0].lower()

        data["last_is_great"] = pypi_last_version > current_version
        data["last_version_differ"] = last_version

        if data["last_is_great"]:
            DIST_PYPI_CACHE.add(dist_name.lower())
        else:
            try:
                DIST_PYPI_CACHE.remove(dist_name.lower())
            except __HOLE__:
                pass

    return render_template('pypi_update.html', **data)
KeyError
dataset/ETHPy150Open perone/stallion/stallion/main.py/releases
8,421
def OpenOutput(path, mode='w'):
    """Open |path| for writing, creating directories if necessary."""
    try:
        os.makedirs(os.path.dirname(path))
    except __HOLE__:
        pass
    return open(path, mode)
OSError
dataset/ETHPy150Open adobe/brackets-shell/gyp/pylib/gyp/generator/ninja.py/OpenOutput
8,422
def GenerateOutput(target_list, target_dicts, data, params):
    user_config = params.get('generator_flags', {}).get('config', None)
    if user_config:
        GenerateOutputForConfig(target_list, target_dicts, data, params,
                                user_config)
    else:
        config_names = target_dicts[target_list[0]]['configurations'].keys()
        if params['parallel']:
            try:
                pool = multiprocessing.Pool(len(config_names))
                arglists = []
                for config_name in config_names:
                    arglists.append(
                        (target_list, target_dicts, data, params, config_name))
                pool.map(CallGenerateOutputForConfig, arglists)
            except __HOLE__, e:
                pool.terminate()
                raise e
        else:
            for config_name in config_names:
                GenerateOutputForConfig(target_list, target_dicts, data,
                                        params, config_name)
KeyboardInterrupt
dataset/ETHPy150Open adobe/brackets-shell/gyp/pylib/gyp/generator/ninja.py/GenerateOutput
8,423
def postOptions(self):
    if self['in'] is None:
        raise usage.UsageError("%s\nYou must specify the input filename." % self)
    if self["typein"] == "guess":
        try:
            self["typein"] = sob.guessType(self["in"])
        except __HOLE__:
            raise usage.UsageError("Could not guess type for '%s'" % self["typein"])
KeyError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/scripts/tapconvert.py/ConvertOptions.postOptions
8,424
@register.filter
def djdiv(value, arg):
    """
    Divide the value by the arg, using Python 3-style division that
    returns floats. If bad values are passed in, return the empty string.
    """
    try:
        return value / arg
    except (__HOLE__, TypeError):
        try:
            return value / arg
        except Exception:
            return ''
ValueError
dataset/ETHPy150Open pydanny/dj-stripe/djstripe/templatetags/djstripe_tags.py/djdiv
8,425
def getNewId(self, objType):
    try:
        objType = self.remap[objType]
    except __HOLE__:
        pass
    try:
        id = self.ids[objType]
        self.ids[objType] += 1
        return id
    except KeyError:
        self.ids[objType] = self.beginId + 1
        return self.beginId
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/versions/v0_9_1/domain/id_scope.py/IdScope.getNewId
8,426
def updateBeginId(self, objType, beginId):
    try:
        objType = self.remap[objType]
    except __HOLE__:
        pass
    try:
        if self.ids[objType] <= beginId:
            self.ids[objType] = beginId
    except KeyError:
        self.ids[objType] = beginId
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/versions/v0_9_1/domain/id_scope.py/IdScope.updateBeginId
8,427
def setBeginId(self, objType, beginId):
    try:
        objType = self.remap[objType]
    except __HOLE__:
        pass
    self.ids[objType] = beginId
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/versions/v0_9_1/domain/id_scope.py/IdScope.setBeginId
8,428
def make_sure_path_exists(path):
    logging.debug('Make sure {} exists'.format(path))
    try:
        os.makedirs(path)
    except __HOLE__ as e:
        if e.errno != errno.EEXIST:
            return False
    return True
OSError
dataset/ETHPy150Open eyadsibai/brute-force-plotter/brute_force_plotter/utils.py/make_sure_path_exists
8,429
def unescape_html(text):
    """Created by Fredrik Lundh (http://effbot.org/zone/re-sub.htm#unescape-html)"""
    def fixup(m):
        text = m.group(0)
        if text[:2] == "&#":
            # character reference
            try:
                if text[:3] == "&#x":
                    return unichr(int(text[3:-1], 16))
                else:
                    return unichr(int(text[2:-1]))
            except __HOLE__:
                pass
        else:
            # named entity
            try:
                text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
            except KeyError:
                pass
        return text  # leave as is
    return re.sub("&#?\w+;", fixup, text)
ValueError
dataset/ETHPy150Open sajao/CrisisLex/src-collect/tweepy1/utils.py/unescape_html
8,430
def import_simplejson():
    try:
        import simplejson as json
    except ImportError:
        try:
            import json  # Python 2.6+
        except ImportError:
            try:
                from django.utils import simplejson as json  # Google App Engine
            except __HOLE__:
                raise ImportError, "Can't load a json library"
    return json
ImportError
dataset/ETHPy150Open sajao/CrisisLex/src-collect/tweepy1/utils.py/import_simplejson
8,431
def _construct_ring(self, param,
                    msg='Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings'):
    "Helper routine for trying to construct a ring from the given parameter."
    if isinstance(param, LinearRing):
        return param
    try:
        ring = LinearRing(param)
        return ring
    except __HOLE__:
        raise TypeError(msg)
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/contrib/gis/geos/polygon.py/Polygon._construct_ring
8,432
def notifyDone(self, relay):
    """A relaying SMTP client is disconnected.

    unmark all pending messages under this relay's responsibility
    as being relayed, and remove the relay.
    """
    for message in self.manager.managed.get(relay, ()):
        if self.manager.queue.noisy:
            log.msg("Setting " + message + " waiting")
        self.manager.queue.setWaiting(message)
    try:
        del self.manager.managed[relay]
    except __HOLE__:
        pass
    notifications = self._completionDeferreds
    self._completionDeferreds = None
    for d in notifications:
        d.callback(None)
KeyError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/mail/relaymanager.py/_AttemptManager.notifyDone
8,433
def notifyNoConnection(self, relay):
    """Relaying SMTP client couldn't connect.

    Useful because it tells us our upstream server is unavailable.
    """
    # Back off a bit
    try:
        msgs = self.manager.managed[relay]
    except __HOLE__:
        log.msg("notifyNoConnection passed unknown relay!")
        return

    if self.manager.queue.noisy:
        log.msg("Backing off on delivery of " + str(msgs))

    def setWaiting(queue, messages):
        map(queue.setWaiting, messages)

    from twisted.internet import reactor
    reactor.callLater(30, setWaiting, self.manager.queue, msgs)
    del self.manager.managed[relay]
KeyError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/mail/relaymanager.py/_AttemptManager.notifyNoConnection
8,434
def markGood(self, mx):
    """Indicate a given mx host is back online.

    @type mx: C{str}
    @param mx: The hostname of the host which is up.
    """
    try:
        del self.badMXs[mx]
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/mail/relaymanager.py/MXCalculator.markGood
8,435
def __init__(self, nagcat, conf):
    BaseTest.__init__(self, conf)
    self._nagcat = nagcat
    self._test = conf.get('test', "")
    self._description = conf.get('description', self._test)
    self._documentation = conf.get('documentation', "")
    self._investigation = conf.get('investigation', "")
    self._priority = conf.get('priority', "")
    self._url = conf.get('url', "")
    self._subtests = {}

    # Special little value!
    # Mark this test as CRITICAL if it has been in WARNING
    # for too long. A value of 0 disables this check.
    self._warning_time_limit = util.Interval(
            conf.get('warning_time_limit', 0))

    # If self._documentation is a list convert it to a string
    if isinstance(self._documentation, list):
        self._documentation = "\n".join(self._documentation)
    if isinstance(self._investigation, list):
        self._investigation = "\n".join(self._documentation)

    if self._priority:
        self._priority = "Priority: %s\n\n" % self._priority

    if conf['query.type'] == "compound":
        self._compound = True
        conf['query'].expand(recursive=False)
        self._return = conf.get('query.return', None)

        for name, qconf in conf['query'].iteritems():
            if not isinstance(qconf, struct.Struct):
                continue
            self._addDefaults(qconf)
            self._subtests[name] = nagcat.new_query(qconf,
                    qcls=query.FilteredQuery)
            self.addDependency(self._subtests[name])

        if not self._subtests:
            raise errors.ConfigError(conf['query'],
                    "compound query must have a sub-query")

        if self._return or len(self._subtests) > 1:
            if not self._return:
                raise errors.ConfigError(conf['query'],
                        "return statement is required")

            # Convert $(subquery) to data['subquery']
            self._return = re.sub("\\$\\(([^\\)]+)\\)",
                    lambda m: "data['%s']" % m.group(1), self._return)

            test_values = {'NOW': util.MathString('9999')}
            for name in self._subtests:
                #XXX this test string isn't fool-proof but will mostly work
                test_values[name] = util.MathString('9999')

            try:
                log.trace("Testing expr %r with data=%r" %
                        (self._return, test_values))
                eval(self._return, {'data': test_values})
            except SyntaxError, ex:
                raise errors.ConfigError(conf['query'],
                        "Syntax error in return: %s" % ex)
            except __HOLE__, ex:
                raise errors.ConfigError(conf['query'],
                        "Unknown sub-query in return: %s" % ex)
    else:
        self._compound = False
        qconf = conf.get('query')
        self._addDefaults(qconf)
        self._subtests['query'] = nagcat.new_query(qconf,
                qcls=query.FilteredQuery)
        self.addDependency(self._subtests['query'])

    self._report_callbacks = []
KeyError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/test.py/Test.__init__
8,436
def read_content(self, stream=False):
    db = get_blob_db()
    try:
        blob = db.get(self.blob_id, self._blobdb_bucket())
    except (__HOLE__, NotFound, BadName):
        raise AttachmentNotFound(self.name)

    if stream:
        return blob

    with blob:
        return blob.read()
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/form_processor/models.py/AbstractAttachment.read_content
8,437
def walkTroveSet(self, topTrove, ignoreMissing=True, withFiles=True,
                 asTuple=True):
    """
    Generator returns all of the troves included by topTrove, including
    topTrove itself. It is a depth first search of strong refs.
    Punchouts are taken into account.

    @param asTuple: If True, (name, version, flavor) tuples are returned
    instead of Trove objects. This can be much faster.
    """
    def _collect(l, tup):
        if tup[1] is None:
            if trove.troveIsComponent(tup[0][0]):
                # don't bother looking for children of components
                tup[1] = []
            else:
                l.append(tup)
        else:
            for t in tup[1]:
                _collect(l, t)

    if asTuple and hasattr(self, 'getTroveTroves'):
        assert(not withFiles)
        seen = set()
        all = [ topTrove.getNameVersionFlavor(), None ]
        seen.add(topTrove.getNameVersionFlavor())

        while True:
            getList = []
            _collect(getList, all)
            if not getList:
                break

            refs = self.getTroveTroves([ x[0] for x in getList],
                                       justPresent = True)
            for item, refList in itertools.izip(getList, refs):
                item[1] = []
                for x in refList:
                    if x not in seen:
                        seen.add(x)
                        item[1].append([x, None])

        stack = [ all ]
        while stack:
            next = stack.pop()
            yield next[0]
            stack += next[1]

        return

    def _format(trv):
        if asTuple:
            return trv.getNameVersionFlavor()
        else:
            return trv

    yield _format(topTrove)

    seen = { topTrove.getName() : [ (topTrove.getVersion(),
                                     topTrove.getFlavor()) ] }

    troveList = [x for x in sorted(topTrove.iterTroveList(strongRefs=True))]

    while troveList:
        (name, version, flavor) = troveList[0]
        del troveList[0]

        if seen.has_key(name):
            match = False
            for (ver, fla) in seen[name]:
                if version == ver and fla == flavor:
                    match = True
                    break
            if match:
                continue
            seen[name].append((version, flavor))
        else:
            seen[name] = [ (version, flavor) ]

        try:
            trv = self.getTrove(name, version, flavor, withFiles=withFiles)
            yield _format(trv)
            troveList = ([ x for x in
                           sorted(trv.iterTroveList(strongRefs=True)) ]
                         + troveList)
        except errors.TroveMissing:
            if not ignoreMissing:
                raise
        except __HOLE__:
            if not ignoreMissing:
                raise
KeyError
dataset/ETHPy150Open sassoftware/conary/conary/repository/trovesource.py/AbstractTroveSource.walkTroveSet
8,438
def iterFilesInTrove(self, n, v, f, sortByPath=False, withFiles=False,
                     capsules=False):
    try:
        cs = self.troveCsMap[n, v, f]
    except __HOLE__:
        raise errors.TroveMissing(n, v)

    trvCs = cs.getNewTroveVersion(n, v, f)
    fileList = trvCs.getNewFileList()
    if not fileList:
        return

    if capsules:
        fileList = [ x for x in fileList if x[0] == trove.CAPSULE_PATHID ]
    else:
        fileList = [ x for x in fileList if x[0] != trove.CAPSULE_PATHID ]

    if not withFiles:
        if sortByPath:
            for item in sorted(fileList):
                yield item
        else:
            for item in fileList:
                yield item
        return

    if sortByPath:
        # files stored in changesets are sorted by pathId, and must be
        # retrieved in that order. But we want to display them by
        # path. So, retrieve the info from the changeset by pathId
        # and stored it in a dict to be retrieved after sorting by
        # path
        changes = {}
        for pathId, path, fileId, version in fileList:
            changes[pathId] = cs.getFileChange(None, fileId)

        fileList = sorted(fileList, key=lambda x: x[1])

        for pathId, path, fileId, version in fileList:
            change = changes[pathId]
            if change is None:
                fileObj = None
            else:
                fileObj = files.ThawFile(change, pathId)
            yield pathId, path, fileId, version, fileObj
KeyError
dataset/ETHPy150Open sassoftware/conary/conary/repository/trovesource.py/ChangesetFilesTroveSource.iterFilesInTrove
8,439
def getTroves(self, troveList, withFiles=True, allowMissing=True,
              callback=None):
    troveList = list(enumerate(troveList))  # make a copy and add indexes
    numTroves = len(troveList)
    results = [None] * numTroves

    for source in self.sources:
        newTroveList = []
        newIndexes = []
        try:
            troves = source.getTroves([x[1] for x in troveList],
                                      withFiles=withFiles,
                                      callback=callback)
        except __HOLE__:
            continue
        for ((index, troveTup), trove) in itertools.izip(troveList, troves):
            if trove is None:
                newTroveList.append((index, troveTup))
            else:
                results[index] = trove
        troveList = newTroveList
    if troveList and not allowMissing:
        raise errors.TroveMissingError(troveList[0][1][0], troveList[0][1][1])
    return results
NotImplementedError
dataset/ETHPy150Open sassoftware/conary/conary/repository/trovesource.py/SourceStack.getTroves
8,440
def getFileVersions(self, fileIds):
    results = [ None ] * len(fileIds)
    needed = list(enumerate(fileIds))
    for source in self.sources:
        try:
            newResults = source.getFileVersions([ x[1] for x in needed ])
            for result, (i, info) in itertools.izip(newResults, needed):
                if info:
                    results[i] = result
            needed = [ tup for tup in needed if results[tup[0]] is None ]
            if not needed:
                break
        # FIXME: there should be a better error for this
        except (KeyError, __HOLE__), e:
            continue
    return results
NotImplementedError
dataset/ETHPy150Open sassoftware/conary/conary/repository/trovesource.py/SourceStack.getFileVersions
8,441
def getFileVersion(self, pathId, fileId, version):
    for source in self.sources:
        try:
            return source.getFileVersion(pathId, fileId, version)
        # FIXME: there should be a better error for this
        except (KeyError, __HOLE__), e:
            continue
    return None
NotImplementedError
dataset/ETHPy150Open sassoftware/conary/conary/repository/trovesource.py/SourceStack.getFileVersion
8,442
def iterFilesInTrove(self, n, v, f, *args, **kw):
    for source in self.sources:
        try:
            for value in source.iterFilesInTrove(n, v, f, *args, **kw):
                yield value
            return
        except __HOLE__:
            pass
        except errors.TroveMissing:
            pass
    raise errors.TroveMissing(n, v)
NotImplementedError
dataset/ETHPy150Open sassoftware/conary/conary/repository/trovesource.py/SourceStack.iterFilesInTrove
8,443
def clever_reset_ref(git_project, ref, raises=True):
    """ Resets only if needed, fetches only if needed """
    try:
        remote_name = git_project.default_remote.name
    except __HOLE__:
        error_msg = "Project {} has no default remote, defaulting to origin"
        ui.error(error_msg.format(git_project.name))
        remote_name = "origin"

    git = qisrc.git.Git(git_project.path)
    if ref.startswith("refs/"):
        if raises:
            git.fetch(remote_name, ref)
            git.reset("--hard", "FETCH_HEAD")
            return
        else:
            with git.transaction() as transaction:
                git.fetch(remote_name, ref)
                git.reset("--hard", "FETCH_HEAD")
            return transaction.ok, transaction.output

    rc, ref_sha1 = git.call("rev-parse", ref, raises=False)
    if rc != 0:
        # Maybe this is a newly pushed tag, try to fetch:
        git.fetch(remote_name)
        rc, ref_sha1 = git.call("rev-parse", ref, raises=False)
        if rc != 0:
            return False, "Could not parse %s as a valid ref" % ref

    _, actual_sha1 = git.call("rev-parse", "HEAD", raises=False)
    if actual_sha1 == ref_sha1:
        # Nothing to do
        if raises:
            return
        else:
            return True, ""

    ret, _ = git.call("show", "--oneline", ref, raises=False)
    if ret == 0:
        # SHA-1 exists locally
        if raises:
            git.reset("--hard", ref)
        else:
            rc, out = git.reset("--hard", ref, raises=False)
            return (rc == 0), out
    else:
        # Full fetch in this case
        if raises:
            git.fetch(remote_name)
            git.reset("--hard", ref)
        else:
            with git.transaction() as transaction:
                git.fetch(remote_name)
                git.reset("--hard", ref)
            return transaction.ok, transaction.output
AttributeError
dataset/ETHPy150Open aldebaran/qibuild/python/qisrc/reset.py/clever_reset_ref
8,444
def _import_class_or_module(self, name):
    """
    Import a class using its fully-qualified *name*.
    """
    try:
        path, base = self.py_sig_re.match(name).groups()
    except:
        raise ValueError(
            "Invalid class or module '%s' specified for inheritance diagram" % name)
    fullname = (path or '') + base
    path = (path and path.rstrip('.'))
    if not path:
        path = base
    try:
        module = __import__(path, None, None, [])
        # We must do an import of the fully qualified name.  Otherwise if a
        # subpackage 'a.b' is requested where 'import a' does NOT provide
        # 'a.b' automatically, then 'a.b' will not be found below.  This
        # second call will force the equivalent of 'import a.b' to happen
        # after the top-level import above.
        my_import(fullname)
    except __HOLE__:
        raise ValueError(
            "Could not import class or module '%s' specified for inheritance diagram" % name)

    try:
        todoc = module
        for comp in fullname.split('.')[1:]:
            todoc = getattr(todoc, comp)
    except AttributeError:
        raise ValueError(
            "Could not find class or module '%s' specified for inheritance diagram" % name)

    # If a class, just return it
    if inspect.isclass(todoc):
        return [todoc]
    elif inspect.ismodule(todoc):
        classes = []
        for cls in list(todoc.__dict__.values()):
            if inspect.isclass(cls) and cls.__module__ == todoc.__name__:
                classes.append(cls)
        return classes
    raise ValueError(
        "'%s' does not resolve to a class or module" % name)
ImportError
dataset/ETHPy150Open ipython/ipython-py3k/docs/sphinxext/inheritance_diagram.py/InheritanceGraph._import_class_or_module
8,445
def run_dot(self, args, name, parts=0, urls={},
            graph_options={}, node_options={}, edge_options={}):
    """
    Run graphviz 'dot' over this graph, returning whatever 'dot'
    writes to stdout.

    *args* will be passed along as commandline arguments.

    *name* is the name of the graph

    *urls* is a dictionary mapping class names to http urls

    Raises DotException for any of the many os and installation-related
    errors that may occur.
    """
    try:
        dot = subprocess.Popen(['dot'] + list(args),
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                               close_fds=True)
    except __HOLE__:
        raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?")
    except ValueError:
        raise DotException("'dot' called with invalid arguments")
    except:
        raise DotException("Unexpected error calling 'dot'")

    self.generate_dot(dot.stdin, name, parts, urls, graph_options,
                      node_options, edge_options)
    dot.stdin.close()
    result = dot.stdout.read()
    returncode = dot.wait()
    if returncode != 0:
        raise DotException("'dot' returned the errorcode %d" % returncode)
    return result
OSError
dataset/ETHPy150Open ipython/ipython-py3k/docs/sphinxext/inheritance_diagram.py/InheritanceGraph.run_dot
8,446
def _validate_volume(driver_info, volume_id):
    """Validates if volume is in Storage pools designated for ironic."""
    volume = _get_volume(driver_info, volume_id)

    # Check if the ironic <scard>/ironic-<pool_id>/<volume_id> naming scheme
    # is present in volume id
    try:
        pool_id = volume.id.split('/')[1].lower()
    except __HOLE__:
        pool_id = ""

    if "ironic-" in pool_id:
        return True
    else:
        raise exception.InvalidParameterValue(_(
            "Invalid volume id specified"))
IndexError
dataset/ETHPy150Open openstack/ironic/ironic/drivers/modules/seamicro.py/_validate_volume
8,447
@staticmethod
def eval_config_parameter(param):
    """
    Try to evaluate the given parameter as a string or integer and return
    it properly. This is used to parse i3status configuration parameters
    such as 'disk "/home" {}' or worse like '"cpu_temperature" 0 {}'.
    """
    params = param.split(' ')
    result_list = list()

    for p in params:
        try:
            e_value = eval(p)
            if isinstance(e_value, str) or isinstance(e_value, int):
                p = str(e_value)
            else:
                raise ValueError()
        except (__HOLE__, SyntaxError, ValueError):
            pass
        finally:
            result_list.append(p)

    return ' '.join(result_list)
NameError
dataset/ETHPy150Open ultrabug/py3status/py3status/i3status.py/I3status.eval_config_parameter
8,448
@staticmethod
def eval_config_value(value):
    """
    Try to evaluate the given parameter as a string or integer and return
    it properly. This is used to parse i3status configuration parameters
    such as 'disk "/home" {}' or worse like '"cpu_temperature" 0 {}'.
    """
    if value.lower() in ('true', 'false'):
        return eval(value.title())
    try:
        e_value = eval(value)
        if isinstance(e_value, str):
            if e_value.lower() in ('true', 'false'):
                value = eval(e_value.title())
            else:
                value = e_value
        elif isinstance(e_value, int):
            value = e_value
        else:
            raise ValueError()
    except (NameError, __HOLE__):
        pass
    finally:
        return value
ValueError
dataset/ETHPy150Open ultrabug/py3status/py3status/i3status.py/I3status.eval_config_value
8,449
def i3status_config_reader(self, i3status_config_path):
    """
    Parse i3status.conf so we can adapt our code to the i3status config.
    """
    config = {
        'general': {
            'color_bad': '#FF0000',
            'color_degraded': '#FFFF00',
            'color_good': '#00FF00',
            'color_separator': '#333333',
            'colors': False,
            'interval': 5,
            'output_format': 'i3bar'
        },
        'i3s_modules': [],
        'on_click': {},
        'order': [],
        '.group_extras': [],   # extra i3status modules needed by groups
        '.module_groups': {},  # record groups that modules are in
        'py3_modules': []
    }

    # some ugly parsing
    in_section = False
    section_name = ''
    group_name = None

    for line in open(i3status_config_path, 'r'):
        line = line.strip(' \t\n\r')

        if not line or line.startswith('#'):
            continue

        if line.startswith('order'):
            in_section = True
            section_name = 'order'

        if not in_section and line.startswith('group'):
            group_name = line.split('{')[0].strip()
            config[group_name] = {'items': []}
            continue

        if not in_section and group_name and line == '}':
            group_name = None
            continue

        if group_name and not in_section and '=' in line:
            # check this is not a section definition
            if '{' not in line or line.index('{') > line.index('='):
                key = line.split('=', 1)[0].strip()
                key = self.eval_config_parameter(key)
                value = line.split('=', 1)[1].strip()
                value = self.eval_config_value(value)
                if not key.startswith('on_click'):
                    config[group_name][key] = value
                else:
                    # on_click special parameters
                    try:
                        button = int(key.split()[1])
                        if button not in range(1, 6):
                            raise ValueError('should be 1, 2, 3, 4 or 5')
                    except IndexError as e:
                        raise IndexError(
                            'missing "button id" for "on_click" '
                            'parameter in group {}'.format(group_name))
                    except ValueError as e:
                        raise ValueError('invalid "button id" '
                                         'for "on_click" parameter '
                                         'in group {} ({})'.format(
                                             group_name, e))
                    on_c = config['on_click']
                    on_c[group_name] = on_c.get(group_name, {})
                    on_c[group_name][button] = value
                continue

        if not in_section:
            section_name = line.split('{')[0].strip()
            section_name = self.eval_config_parameter(section_name)
            if not section_name:
                continue
            else:
                in_section = True
                if section_name not in config:
                    config[section_name] = {}
                if group_name:
                    # update the items in the group
                    config[group_name]['items'].append(section_name)
                    section = config['.module_groups'].setdefault(
                        section_name, [])
                    if group_name not in section:
                        section.append(group_name)
                    if not self.valid_config_param(section_name):
                        # py3status module add a reference to the group and
                        # make sure we have it in the list of modules to
                        # run
                        if section_name not in config['py3_modules']:
                            config['py3_modules'].append(section_name)
                    else:
                        # i3status module. Add to the list of needed
                        # modules and add to the `.group-extras` config to
                        # ensure that it gets run even though not in
                        # `order` config
                        if section_name not in config['i3s_modules']:
                            config['i3s_modules'].append(section_name)
                        if section_name not in config['.group_extras']:
                            config['.group_extras'].append(section_name)

        if '{' in line:
            in_section = True

        if section_name and '=' in line:
            section_line = line

            # one liner cases
            if line.endswith('}'):
                section_line = section_line.split('}', -1)[0].strip()
            if line.startswith(section_name + ' {'):
                section_line = section_line.split(
                    section_name + ' {')[1].strip()

            key = section_line.split('=', 1)[0].strip()
            key = self.eval_config_parameter(key)

            value = section_line.split('=', 1)[1].strip()
            value = self.eval_config_value(value)

            if section_name == 'order':
                config[section_name].append(value)
                line = '}'

                # create an empty config for this module
                if value not in config:
                    config[value] = {}

                # detect internal modules to be loaded dynamically
                if not self.valid_config_param(value):
                    config['py3_modules'].append(value)
                else:
                    config['i3s_modules'].append(value)
            else:
                if not key.startswith('on_click'):
                    config[section_name][key] = value
                else:
                    # on_click special parameters
                    try:
                        button = int(key.split()[1])
                        if button not in range(1, 6):
                            raise ValueError('should be 1, 2, 3, 4 or 5')
                    except IndexError as e:
                        raise IndexError(
                            'missing "button id" for "on_click" '
                            'parameter in section {}'.format(section_name))
                    except __HOLE__ as e:
                        raise ValueError('invalid "button id" '
                                         'for "on_click" parameter '
                                         'in section {} ({})'.format(
                                             section_name, e))
                    on_c = config['on_click']
                    on_c[section_name] = on_c.get(section_name, {})
                    on_c[section_name][button] = value

        if line.endswith('}'):
            in_section = False
            section_name = ''

    # py3status only uses the i3bar protocol because it needs JSON output
    if config['general']['output_format'] != 'i3bar':
        raise RuntimeError('i3status output_format should be set' +
                           ' to "i3bar" on {}'.format(
                               i3status_config_path,
                               ' or on your own {}/.i3status.conf'.format(
                                   os.path.expanduser('~'))
                               if i3status_config_path ==
                               '/etc/i3status.conf' else ''))

    # time and tztime modules need a format for correct processing
    for name in config:
        if name.split()[0] in TIME_MODULES and 'format' not in config[name]:
            if name.split()[0] == 'time':
                config[name]['format'] = TIME_FORMAT
            else:
                config[name]['format'] = TZTIME_FORMAT

    def clean_i3status_modules(key):
        # cleanup unconfigured i3status modules that have no default
        for module_name in deepcopy(config[key]):
            if (self.valid_config_param(module_name, cleanup=True) and
                    not config.get(module_name)):
                config.pop(module_name)
                if module_name in config['i3s_modules']:
                    config['i3s_modules'].remove(module_name)
                config[key].remove(module_name)

    clean_i3status_modules('order')
    clean_i3status_modules('.group_extras')

    return config
ValueError
dataset/ETHPy150Open ultrabug/py3status/py3status/i3status.py/I3status.i3status_config_reader
8,450
@staticmethod
def write_in_tmpfile(text, tmpfile):
    """
    Write the given text in the given tmpfile in python2 and python3.
    """
    try:
        tmpfile.write(text)
    except __HOLE__:
        tmpfile.write(str.encode(text))
TypeError
dataset/ETHPy150Open ultrabug/py3status/py3status/i3status.py/I3status.write_in_tmpfile
8,451
@profile
def run(self):
    """
    Spawn i3status using a self generated config file and poll its output.
    """
    try:
        with NamedTemporaryFile(prefix='py3status_') as tmpfile:
            self.write_tmp_i3status_config(tmpfile)
            syslog(LOG_INFO,
                   'i3status spawned using config file {}'.format(
                       tmpfile.name))

            i3status_pipe = Popen(
                ['i3status', '-c', tmpfile.name],
                stdout=PIPE,
                stderr=PIPE,
                # Ignore the SIGUSR2 signal for this subprocess
                preexec_fn=lambda: signal(SIGUSR2, SIG_IGN)
            )
            self.poller_inp = IOPoller(i3status_pipe.stdout)
            self.poller_err = IOPoller(i3status_pipe.stderr)
            self.tmpfile_path = tmpfile.name

            # Store the pipe so we can signal it
            self.i3status_pipe = i3status_pipe

            try:
                # loop on i3status output
                while self.lock.is_set():
                    line = self.poller_inp.readline()
                    if line:
                        # remove leading comma if present
                        if line[0] == ',':
                            line = line[1:]
                        if line.startswith('[{'):
                            json_list = loads(line)
                            self.last_output = json_list
                            self.set_responses(json_list)
                            self.ready = True
                    else:
                        err = self.poller_err.readline()
                        code = i3status_pipe.poll()
                        if code is not None:
                            msg = 'i3status died'
                            if err:
                                msg += ' and said: {}'.format(err)
                            else:
                                msg += ' with code {}'.format(code)
                            raise IOError(msg)
            except IOError:
                err = sys.exc_info()[1]
                self.error = err
    except __HOLE__:
        # we cleanup the tmpfile ourselves so when the delete will occur
        # it will usually raise an OSError: No such file or directory
        pass

    self.i3status_pipe = None
OSError
dataset/ETHPy150Open ultrabug/py3status/py3status/i3status.py/I3status.run
8,452
def _do_action(self, action, path, *args, **kwargs):
    """Call **action** on each filesystem object in turn. If one raises
    an :py:class:`IOError`, save the exception and try the rest. If
    none succeed, re-raise the first exception.
    """
    first_exception = None

    for fs in self.filesystems:
        if fs.can_handle_path(path):
            try:
                return getattr(fs, action)(path, *args, **kwargs)
            except __HOLE__ as e:
                if first_exception is None:
                    first_exception = e

    if first_exception is None:
        raise IOError("Can't handle path: %s" % path)
    else:
        raise first_exception
IOError
dataset/ETHPy150Open Yelp/mrjob/mrjob/fs/composite.py/CompositeFilesystem._do_action
8,453
def ensure_exists(path):
    try:
        os.makedirs(path)
    except __HOLE__ as e:
        if e.errno == errno.EEXIST:
            # (path exists)
            pass
        if not os.path.isdir(path):
            raise
OSError
dataset/ETHPy150Open memex-explorer/memex-explorer/source/apps/crawl_space/utils.py/ensure_exists
8,454
def rm_if_exists(filename):
    try:
        os.remove(filename)
        return True
    except __HOLE__ as e:
        if e.errno != errno.ENOENT:
            # (no such file or directory)
            raise
        return False
OSError
dataset/ETHPy150Open memex-explorer/memex-explorer/source/apps/crawl_space/utils.py/rm_if_exists
8,455
def parse_provider_config(type, config):
    try:
        instance = manager.get(type)
    except KeyError:
        raise ApiError(
            message='Invalid provider: {}'.format(type),
            name='invalid_provider',
        )

    result = {}

    all_options = chain(instance.get_default_options().items(),
                        instance.get_options().items())

    for option, option_values in all_options:
        value = config.get(option)
        if value and option_values.get('type'):
            try:
                config[option] = option_values['type'](value)
            except (__HOLE__, TypeError):
                raise ApiError(
                    message='Option "{}" is not a valid type for provider: {}'.format(option, type),
                    name='invalid_check',
                )
        if option_values.get('required') and not value:
            raise ApiError(
                message='Missing required option "{}" for provider: {}'.format(option, type),
                name='invalid_provider',
            )
        result[option] = value

    return result
ValueError
dataset/ETHPy150Open getsentry/freight/freight/providers/utils.py/parse_provider_config
8,456
def absent(name):
    '''
    Ensures that the user does not exist, eventually delete user.

    .. versionadded:: 2016.3.0

    :param name: user alias
    :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)

    .. code-block:: yaml

        George:
            zabbix_user.absent

    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    # Comment and change messages
    comment_user_deleted = 'User {0} deleted.'.format(name)
    comment_user_notdeleted = 'Unable to delete user: {0}. '.format(name)
    comment_user_notexists = 'User {0} does not exist.'.format(name)
    changes_user_deleted = {name: {'old': 'User {0} exists.'.format(name),
                                   'new': 'User {0} deleted.'.format(name),
                                   }
                            }

    user_get = __salt__['zabbix.user_get'](name)

    # Dry run, test=true mode
    if __opts__['test']:
        if not user_get:
            ret['result'] = True
            ret['comment'] = comment_user_notexists
        else:
            ret['result'] = None
            ret['comment'] = comment_user_deleted
            ret['changes'] = changes_user_deleted

    if not user_get:
        ret['result'] = True
        ret['comment'] = comment_user_notexists
    else:
        try:
            userid = user_get[0]['userid']
            user_delete = __salt__['zabbix.user_delete'](userid)
        except __HOLE__:
            user_delete = False

        if user_delete and 'error' not in user_delete:
            ret['result'] = True
            ret['comment'] = comment_user_deleted
            ret['changes'] = changes_user_deleted
        else:
            ret['result'] = False
            ret['comment'] = comment_user_notdeleted + str(user_delete['error'])

    return ret
KeyError
dataset/ETHPy150Open saltstack/salt/salt/states/zabbix_user.py/absent
8,457
def _iso_to_datetime(self, isodate):
    date_formats = ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S%z')
    date = None

    for date_format in date_formats:
        try:
            date = datetime.strptime(isodate, date_format)
        except __HOLE__:
            pass

        if date:
            break

    return date
ValueError
dataset/ETHPy150Open apache/libcloud/libcloud/loadbalancer/drivers/rackspace.py/RackspaceLBDriver._iso_to_datetime
8,458
def _plot_labels(target, *labels):
    for l in labels:
        have_label = False
        for child in target.get_children():
            try:
                if child.get_text() == l['s'] and \
                        child.get_position() == (l['x'], l['y']):
                    have_label = True
                    break
            except __HOLE__:
                pass
        if not have_label:
            target.text(**l)
AttributeError
dataset/ETHPy150Open scot-dev/scot/scot/plotting.py/_plot_labels
8,459
def __call__(self, environ, start_response):
    url = []

    def change_response(status, headers, exc_info=None):
        status_code = status.split(' ')
        try:
            code = int(status_code[0])
        except (ValueError, __HOLE__):
            raise Exception(
                'StatusBasedForward middleware '
                'received an invalid status code %s' % repr(status_code[0])
            )
        message = ' '.join(status_code[1:])
        new_url = self.mapper(
            code, message, environ, self.global_conf, **self.params
        )
        if not (new_url == None or isinstance(new_url, str)):
            raise TypeError(
                'Expected the url to internally '
                'redirect to in the StatusBasedForward mapper'
                'to be a string or None, not %r' % new_url)
        if new_url:
            url.append([new_url, status, headers])
            # We have to allow the app to write stuff, even though
            # we'll ignore it:
            return [].append
        else:
            return start_response(status, headers, exc_info)

    app_iter = self.application(environ, change_response)

    if url:
        if hasattr(app_iter, 'close'):
            app_iter.close()

        def factory(app):
            return StatusKeeper(app, status=url[0][1], url=url[0][0],
                                headers=url[0][2])
        raise ForwardRequestException(factory=factory)
    else:
        return app_iter
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/errordocument.py/StatusBasedForward.__call__
8,460
def make_errordocument(app, global_conf, **kw):
    """
    Paste Deploy entry point to create a error document wrapper.

    Use like::

        [filter-app:main]
        use = egg:Paste#errordocument
        next = real-app
        500 = /lib/msg/500.html
        404 = /lib/msg/404.html
    """
    map = {}
    for status, redir_loc in kw.items():
        try:
            status = int(status)
        except __HOLE__:
            raise ValueError('Bad status code: %r' % status)
        map[status] = redir_loc
    forwarder = forward(app, map)
    return forwarder
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/errordocument.py/make_errordocument
8,461
def __call__(self, environ, start_response):
    url = []
    code_message = []
    try:
        def change_response(status, headers, exc_info=None):
            new_url = None
            parts = status.split(' ')
            try:
                code = int(parts[0])
            except (ValueError, __HOLE__):
                raise Exception(
                    '_StatusBasedRedirect middleware '
                    'received an invalid status code %s' % repr(parts[0])
                )
            message = ' '.join(parts[1:])
            new_url = self.mapper(
                code, message, environ, self.global_conf, self.kw
            )
            if not (new_url == None or isinstance(new_url, str)):
                raise TypeError(
                    'Expected the url to internally '
                    'redirect to in the _StatusBasedRedirect error_mapper'
                    'to be a string or None, not %s' % repr(new_url)
                )
            if new_url:
                url.append(new_url)
            code_message.append([code, message])
            return start_response(status, headers, exc_info)

        app_iter = self.application(environ, change_response)
    except:
        try:
            import sys
            error = str(sys.exc_info()[1])
        except:
            error = ''
        try:
            code, message = code_message[0]
        except:
            code, message = ['', '']
        environ['wsgi.errors'].write(
            'Error occurred in _StatusBasedRedirect '
            'intercepting the response: ' + str(error)
        )
        return [self.fallback_template % {'message': message, 'code': code}]
    else:
        if url:
            url_ = url[0]
            new_environ = {}
            for k, v in environ.items():
                if k != 'QUERY_STRING':
                    new_environ['QUERY_STRING'] = urlparse.urlparse(url_)[4]
                else:
                    new_environ[k] = v

            class InvalidForward(Exception):
                pass

            def eat_start_response(status, headers, exc_info=None):
                """
                We don't want start_response to do anything since it
                has already been called
                """
                if status[:3] != '200':
                    raise InvalidForward(
                        "The URL %s to internally forward "
                        "to in order to create an error document did not "
                        "return a '200' status code." % url_
                    )

            forward = environ['paste.recursive.forward']
            old_start_response = forward.start_response
            forward.start_response = eat_start_response
            try:
                app_iter = forward(url_, new_environ)
            except InvalidForward:
                code, message = code_message[0]
                environ['wsgi.errors'].write(
                    'Error occurred in '
                    '_StatusBasedRedirect redirecting '
                    'to new URL: ' + str(url[0])
                )
                return [
                    self.fallback_template % {
                        'message': message,
                        'code': code,
                    }
                ]
            else:
                forward.start_response = old_start_response
                return app_iter
        else:
            return app_iter
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/errordocument.py/_StatusBasedRedirect.__call__
8,462
def _run_policies(self, envelope):
    results = [envelope]

    def recurse(current, i):
        try:
            policy = self.queue_policies[i]
        except __HOLE__:
            return
        ret = policy.apply(current)
        if ret:
            results.remove(current)
            results.extend(ret)
            for env in ret:
                recurse(env, i+1)
        else:
            recurse(current, i+1)

    recurse(envelope, 0)
    return results
IndexError
dataset/ETHPy150Open slimta/python-slimta/slimta/queue/__init__.py/Queue._run_policies
8,463
def _dequeue(self, id):
    try:
        envelope, attempts = self.store.get(id)
    except __HOLE__:
        return
    if id not in self.active_ids:
        self.active_ids.add(id)
        self._pool_spawn('relay', self._attempt, id, envelope, attempts)
KeyError
dataset/ETHPy150Open slimta/python-slimta/slimta/queue/__init__.py/Queue._dequeue
8,464
def _wait_store(self):
    while True:
        try:
            for entry in self.store.wait():
                self._add_queued(entry)
        except __HOLE__:
            return
NotImplementedError
dataset/ETHPy150Open slimta/python-slimta/slimta/queue/__init__.py/Queue._wait_store
8,465
def _wait_ready(self, now):
    try:
        first = self.queued[0]
    except __HOLE__:
        self.wake.wait()
        self.wake.clear()
        return
    first_timestamp = first[0]
    if first_timestamp > now:
        self.wake.wait(first_timestamp - now)
        self.wake.clear()
IndexError
dataset/ETHPy150Open slimta/python-slimta/slimta/queue/__init__.py/Queue._wait_ready
8,466
def parse_argspec(obj_or_str):
    if isinstance(obj_or_str, basestring):
        obj_or_str = obj_or_str.strip()
        if not obj_or_str.endswith(":"):
            obj_or_str += ":"
        if not obj_or_str.startswith("def "):
            obj_or_str = "def " + obj_or_str
        try:
            tree = ast.parse(obj_or_str + "\n pass")
        except SyntaxError:
            # cannot parse the argspec
            print "*** CANNOT PARSE", obj_or_str
            return []
        argspec_name = tree.body[0].name
        argspec_args = [a.id for a in tree.body[0].args.args]
        print tree.body[0].args.defaults
        argspec_defaults = []
        for i, d in enumerate(tree.body[0].args.defaults):
            try:
                d_val = ast.literal_eval(d)
            except __HOLE__:
                d_val = None
            argspec_defaults.append(d_val)
    else:
        argspec = inspect.getargspec(obj_or_str)
        argspec_args = argspec.args
        argspec_defaults = argspec.defaults
    if not argspec_defaults:
        start_defaults = len(argspec_args) + 1
    else:
        start_defaults = len(argspec_args) - len(argspec_defaults)
    port_specs_list = []
    has_self = False
    for i, arg in enumerate(argspec_args):
        if i == 0 and arg == "self":
            has_self = True
            continue
        port_spec = InputPortSpec(arg)
        port_spec.arg_pos = (i-1) if has_self else i
        if i >= start_defaults:
            port_spec.required = False
            default_val = argspec_defaults[i-start_defaults]
            if default_val is not None:
                port_spec.defaults = [default_val]
                port_type = get_type_from_val(default_val)
                if port_type is not None:
                    port_spec.port_type = port_type
        else:
            port_spec.required = True
        port_specs_list.append(port_spec)
    return port_specs_list
ValueError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/packages/matplotlib/parse.py/parse_argspec
8,467
def parse_plots(plot_types, table_overrides):
    def get_module_base(n):
        return n
    def get_super_base(n):
        return "plot"

    module_specs = []
    for plot in plot_types:
        port_specs = {}
        print "========================================"
        print plot
        print "========================================"
        (plot, module_name, super_name) = \
            get_names(plot, get_module_base, get_super_base, "Mpl", "")
        try:
            plot_obj = getattr(matplotlib.pyplot, plot)
        except __HOLE__:
            print '*** CANNOT ADD PLOT "%s";' \
                'IT DOES NOT EXIST IN THIS MPL VERSION ***' % plot
            continue
        port_specs_list = parse_argspec(plot_obj)
        for port_spec in port_specs_list:
            port_specs[port_spec.arg] = port_spec

        docstring = plot_obj.__doc__
        if plot == 'contour':
            # want to change the double newline to single newline...
            print "&*&* FINDING:", \
                docstring.find("*extent*: [ *None* | (x0,x1,y0,y1) ]\n\n")
            docstring = docstring.replace(
                "*extent*: [ *None* | (x0,x1,y0,y1) ]\n\n",
                "*extent*: [ *None* | (x0,x1,y0,y1) ]\n")
        if plot == 'annotate':
            docstring = docstring % dict(
                (k,v) for k, v in
                matplotlib.docstring.interpd.params.iteritems()
                if k == 'Annotation')
        elif plot == 'barbs':
            docstring = docstring % dict(
                (k,v) for k,v in
                matplotlib.docstring.interpd.params.iteritems()
                if k == 'barbs_doc')
        cleaned_docstring, output_port_specs = \
            process_docstring(docstring, port_specs, ('pyplot', plot),
                              table_overrides)

        # for port_spec in port_specs.itervalues():
        #     if port_spec.defaults is not None:
        #         port_spec.defaults = [str(v) for v in port_spec.defaults]
        #     if port_spec.values is not None:
        #         port_spec.values = [[str(v) for v in port_spec.values[0]]]
        #     for alt_ps in port_spec.alternate_specs:
        #         if alt_ps.defaults is not None:
        #             alt_ps.defaults = [str(v) for v in alt_ps.defaults]
        #         if alt_ps.values is not None:
        #             alt_ps.values = [[str(v) for v in alt_ps.values[0]]]
        module_specs.append(ModuleSpec(module_name, super_name,
                                       "matplotlib.pyplot.%s" % plot,
                                       cleaned_docstring,
                                       port_specs.values(),
                                       output_port_specs))

    my_specs = SpecList(module_specs)
    return my_specs
AttributeError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/packages/matplotlib/parse.py/parse_plots
8,468
def info(name):
    '''
    Return information about a group

    CLI Example:

    .. code-block:: bash

        salt '*' group.info foo
    '''
    try:
        grinfo = grp.getgrnam(name)
    except __HOLE__:
        return {}
    else:
        return _format_info(grinfo)
KeyError
dataset/ETHPy150Open saltstack/salt/salt/modules/groupadd.py/info
8,469
def get_related_tags(self, request, **kwargs):
    """
    Can be used to get all tags used by all CommitteeMeetings
    of a specific committee
    """
    try:
        ctype = ContentType.objects.get_by_natural_key(
            kwargs['app_label'], kwargs['object_type'])
    except ContentType.DoesNotExist:
        raise Http404('Object type not found.')

    model = ctype.model_class()
    container = get_object_or_404(model, pk=kwargs['object_id'])
    try:
        related_objects = getattr(container, kwargs['related_name']).all()
    except __HOLE__:
        raise Http404('Related name not found.')

    tags = Tag.objects.usage_for_queryset(related_objects)
    return self._create_response(request, tags)
AttributeError
dataset/ETHPy150Open ofri/Open-Knesset/auxiliary/api.py/TagResource.get_related_tags
8,470
def _plot_sources_raw(ica, raw, picks, exclude, start, stop, show, title,
                      block):
    """Function for plotting the ICA components as raw array."""
    color = _handle_default('color', (0., 0., 0.))
    orig_data = ica._transform_raw(raw, 0, len(raw.times)) * 0.2
    if picks is None:
        picks = range(len(orig_data))
    types = ['misc' for _ in picks]
    picks = list(sorted(picks))
    eog_chs = pick_types(raw.info, meg=False, eog=True, ref_meg=False)
    ecg_chs = pick_types(raw.info, meg=False, ecg=True, ref_meg=False)
    data = [orig_data[pick] for pick in picks]
    c_names = ['ICA %03d' % x for x in range(len(orig_data))]
    for eog_idx in eog_chs:
        c_names.append(raw.ch_names[eog_idx])
        types.append('eog')
    for ecg_idx in ecg_chs:
        c_names.append(raw.ch_names[ecg_idx])
        types.append('ecg')
    extra_picks = np.append(eog_chs, ecg_chs).astype(int)
    if len(extra_picks) > 0:
        eog_ecg_data, _ = raw[extra_picks, :]
        for idx in range(len(eog_ecg_data)):
            if idx < len(eog_chs):
                eog_ecg_data[idx] /= 150e-6  # scaling for eog
            else:
                eog_ecg_data[idx] /= 5e-4  # scaling for ecg
        data = np.append(data, eog_ecg_data, axis=0)
    for idx in range(len(extra_picks)):
        picks = np.append(picks, ica.n_components_ + idx)
    if title is None:
        title = 'ICA components'
    info = create_info([c_names[x] for x in picks], raw.info['sfreq'])
    info['bads'] = [c_names[x] for x in exclude]
    if start is None:
        start = 0
    if stop is None:
        stop = start + 20
        stop = min(stop, raw.times[-1])
    duration = stop - start
    if duration <= 0:
        raise RuntimeError('Stop must be larger than start.')
    t_end = int(duration * raw.info['sfreq'])
    times = raw.times[0:t_end]
    bad_color = (1., 0., 0.)
    inds = list(range(len(picks)))
    data = np.array(data)
    n_channels = min([20, len(picks)])
    params = dict(raw=raw, orig_data=data, data=data[:, 0:t_end], ch_start=0,
                  t_start=start, info=info, duration=duration, ica=ica,
                  n_channels=n_channels, times=times, types=types,
                  n_times=raw.n_times, bad_color=bad_color, picks=picks)
    _prepare_mne_browse_raw(params, title, 'w', color, bad_color, inds,
                            n_channels)
    params['scale_factor'] = 1.0
    params['plot_fun'] = partial(_plot_raw_traces, params=params, inds=inds,
                                 color=color, bad_color=bad_color)
    params['update_fun'] = partial(_update_data, params)
    params['pick_bads_fun'] = partial(_pick_bads, params=params)
    params['label_click_fun'] = partial(_label_clicked, params=params)
    _layout_figure(params)
    # callbacks
    callback_key = partial(_plot_raw_onkey, params=params)
    params['fig'].canvas.mpl_connect('key_press_event', callback_key)
    callback_scroll = partial(_plot_raw_onscroll, params=params)
    params['fig'].canvas.mpl_connect('scroll_event', callback_scroll)
    callback_pick = partial(_mouse_click, params=params)
    params['fig'].canvas.mpl_connect('button_press_event', callback_pick)
    callback_resize = partial(_helper_raw_resize, params=params)
    params['fig'].canvas.mpl_connect('resize_event', callback_resize)
    callback_close = partial(_close_event, params=params)
    params['fig'].canvas.mpl_connect('close_event', callback_close)
    params['fig_proj'] = None
    params['event_times'] = None
    params['update_fun']()
    params['plot_fun']()
    try:
        plt_show(show, block=block)
    except __HOLE__:  # not all versions have this
        plt_show(show)
    return params['fig']
TypeError
dataset/ETHPy150Open mne-tools/mne-python/mne/viz/ica.py/_plot_sources_raw
8,471
def _plot_sources_epochs(ica, epochs, picks, exclude, start, stop, show,
                         title, block):
    """Function for plotting the components as epochs."""
    data = ica._transform_epochs(epochs, concatenate=True)
    eog_chs = pick_types(epochs.info, meg=False, eog=True, ref_meg=False)
    ecg_chs = pick_types(epochs.info, meg=False, ecg=True, ref_meg=False)
    c_names = ['ICA %03d' % x for x in range(ica.n_components_)]
    ch_types = np.repeat('misc', ica.n_components_)
    for eog_idx in eog_chs:
        c_names.append(epochs.ch_names[eog_idx])
        ch_types = np.append(ch_types, 'eog')
    for ecg_idx in ecg_chs:
        c_names.append(epochs.ch_names[ecg_idx])
        ch_types = np.append(ch_types, 'ecg')
    extra_picks = np.append(eog_chs, ecg_chs).astype(int)
    if len(extra_picks) > 0:
        eog_ecg_data = np.concatenate(epochs.get_data()[:, extra_picks],
                                      axis=1)
        data = np.append(data, eog_ecg_data, axis=0)
    scalings = _handle_default('scalings_plot_raw')
    scalings['misc'] = 5.0
    info = create_info(ch_names=c_names, sfreq=epochs.info['sfreq'],
                       ch_types=ch_types)
    info['projs'] = list()
    info['bads'] = [c_names[x] for x in exclude]
    if title is None:
        title = 'ICA components'
    if picks is None:
        picks = list(range(ica.n_components_))
    if start is None:
        start = 0
    if stop is None:
        stop = start + 20
        stop = min(stop, len(epochs.events))
    for idx in range(len(extra_picks)):
        picks = np.append(picks, ica.n_components_ + idx)
    n_epochs = stop - start
    if n_epochs <= 0:
        raise RuntimeError('Stop must be larger than start.')
    params = {'ica': ica,
              'epochs': epochs,
              'info': info,
              'orig_data': data,
              'bads': list(),
              'bad_color': (1., 0., 0.),
              't_start': start * len(epochs.times)}
    params['label_click_fun'] = partial(_label_clicked, params=params)
    _prepare_mne_browse_epochs(params, projs=list(), n_channels=20,
                               n_epochs=n_epochs, scalings=scalings,
                               title=title, picks=picks,
                               order=['misc', 'eog', 'ecg'])
    params['plot_update_proj_callback'] = _update_epoch_data
    _update_epoch_data(params)
    params['hsel_patch'].set_x(params['t_start'])
    callback_close = partial(_close_epochs_event, params=params)
    params['fig'].canvas.mpl_connect('close_event', callback_close)
    try:
        plt_show(show, block=block)
    except __HOLE__:  # not all versions have this
        plt_show(show)
    return params['fig']
TypeError
dataset/ETHPy150Open mne-tools/mne-python/mne/viz/ica.py/_plot_sources_epochs
8,472
def list_projects(folders, folder = None, user = None):
    '''List all folders or all subfolders of a folder.

    If folder is provided, this method will output a list of subfolders
    contained by it. Otherwise, a list of all top-level folders is
    produced.

    :param folders: reference to folder.Folders instance
    :param folder: folder name or None
    :param user: optional user name
    '''
    fid = None if folder is None else Folders.name_to_id(folder)

    # List all folders if none provided.
    if fid is None:
        for f in folders.folders(user):
            print(Folders.id_to_name(f))
        return

    # List subfolders of a specific folder
    try:
        for sid in folders.subfolders(fid, user):
            print(Folders.id_to_name(sid))
    except __HOLE__:
        print("E: folder not found: %s" % folder, file=sys.stderr)
KeyError
dataset/ETHPy150Open dossier/dossier.models/dossier/models/query.py/list_projects
8,473
def run(hide=False, more=False, start="01-01-2012", end=None):
    """Update local game data."""
    # get today's information
    year = date.today().year
    month = date.today().month
    day = date.today().day
    # get ending date information
    if end != None:
        end_month, end_day, end_year = end.split("-")
        end_month, end_day, end_year = [int(end_month), int(end_day), int(end_year)]
    else:
        end_year = year
        end_month = month
        end_day = day
    # get starting date information
    start_month, start_day, start_year = start.split("-")
    first_day, first_month, last_month = [True, True, False]
    # print a message because sometimes it seems like the program is not doing anything
    if not hide:
        print("Checking local data...")
    # looping years
    for i in range(int(start_year), end_year+1):
        # checking if starting month value needs to be used
        if first_month:
            ms = int(start_month)
            first_month = False
        else:
            ms = 1
        # looping months
        me = 13
        if i == end_year:
            me = end_month+1
            last_month = True
        for x in range(ms, me):
            monthstr = str(x).zfill(2)
            loading = False
            if i == year and x > month:
                break
            # checking if starting day value needs to be used
            if first_day:
                ds = int(start_day)
                first_day = False
            else:
                ds = 1
            # looping days
            de = 32
            if last_month:
                de = end_day+1
            for y in range(ds, de):
                if i == year and x >= month and y >= day:
                    break
                daystr = str(y).zfill(2)
                # file information
                filename = "gameday-data/year_%i/month_%s/day_%s/scoreboard.xml.gz" % (i, monthstr, daystr)
                f = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename)
                dirn = "gameday-data/year_%i/month_%s/day_%s" % (i, monthstr, daystr)
                dirname = os.path.join(os.path.dirname(os.path.abspath(__file__)), dirn)
                # check if file exists
                # aka is the data saved
                if not os.path.isfile(f):
                    # try because some dates may not have a file on the mlb.com server
                    # or some months don't have a 31st day
                    try:
                        # get data from url
                        data = urlopen("http://gd2.mlb.com/components/game/mlb/year_%i/month_%s/day_%s/scoreboard.xml" % (i, monthstr, daystr))
                        # loading bar to show something is actually happening
                        if not hide:
                            sys.stdout.write('Loading games for %s-%d (%00.2f%%) \r' % (monthstr, i, y/31.0*100))
                            sys.stdout.flush()
                            loading = True
                        response = data.read()
                        # check if the path exists where the file should go
                        if not os.path.exists(dirname):
                            try:
                                # try to make the folder if permissions allow
                                os.makedirs(dirname)
                            except OSError:
                                access_error(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'gameday-data/'))
                        try:
                            # try to create the file if permissions allow
                            with gzip.open(f, "w") as fi:
                                fi.write(response)
                        except OSError:
                            access_error(dirname)
                    # do nothing if the file is not on mlb.com
                    except HTTPError:
                        pass
                # get extra data if specified
                if more:
                    try:
                        # get the data for games on this day
                        games = mlbgame.day(i, x, y)
                        for z in games:
                            # get the game id which is used to fetch data
                            game_id = z.game_id
                            # file information
                            filename2 = "gameday-data/year_%i/month_%s/day_%s/gid_%s/boxscore.xml.gz" % (i, monthstr, daystr, game_id)
                            f2 = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename2)
                            dirn2 = "gameday-data/year_%i/month_%s/day_%s/gid_%s" % (i, monthstr, daystr, game_id)
                            dirname2 = os.path.join(os.path.dirname(os.path.abspath(__file__)), dirn2)
                            # check if file exists
                            # aka is the information saved
                            if not os.path.isfile(f2):
                                # try because some dates may not have a file on the mlb.com server
                                # or some months don't have a 31st day
                                try:
                                    # get data
                                    data2 = urlopen("http://gd2.mlb.com/components/game/mlb/year_%i/month_%s/day_%s/gid_%s/boxscore.xml" % (i, monthstr, daystr, game_id))
                                    if not hide:
                                        # progress
                                        sys.stdout.write('Loading games for %s-%d (%00.2f%%). \r' % (monthstr, i, y/31.0*100))
                                        sys.stdout.flush()
                                        loading = True
                                    response2 = data2.read()
                                    # checking if files exist and writing new files
                                    if not os.path.exists(dirname2):
                                        try:
                                            os.makedirs(dirname2)
                                        except OSError:
                                            access_error(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'gameday-data/'))
                                    # try to write file
                                    try:
                                        with gzip.open(f2, "w") as fi:
                                            fi.write(response2)
                                    except __HOLE__:
                                        access_error(dirname2)
                                except HTTPError:
                                    pass
                    except:
                        pass
            if loading and not hide:
                # make sure loading ends at 100%
                sys.stdout.write('Loading games for %s-%d (100.00%%).\n' % (monthstr, i))
                sys.stdout.flush()
    # print finished message
    if not hide:
        print("Complete.")
OSError
dataset/ETHPy150Open zachpanz88/mlbgame/mlbgame/update.py/run
8,474
def _yaml_include(self, loader, node):
    """
    Include another yaml file from main file

    This is usually done by registering !include tag
    """
    filepath = node.value
    if not os.path.exists(filepath):
        for dir in self.conf_dirs:
            filepath = os.path.join(dir, node.value)
            if os.path.exists(filepath):
                break
    self.conf_dirs.append(os.path.dirname(filepath))
    try:
        with open(filepath, 'r') as inputfile:
            return yaml.load(inputfile)
    except __HOLE__ as e:
        lg.error("Can't include config file %s: %s" % (filepath, e))
        raise
IOError
dataset/ETHPy150Open gooddata/smoker/smoker/server/daemon.py/Smokerd._yaml_include
8,475
def _load_config(self):
    """
    Load specified config file
    """
    try:
        with open(self.conf['config'], 'r') as fp:
            config = fp.read()
    except __HOLE__ as e:
        lg.error("Can't read config file %s: %s" % (self.conf['config'], e))
        raise

    # Register include constructors
    yaml.add_constructor('!include_dir', self._yaml_include_dir)
    yaml.add_constructor('!include', self._yaml_include)

    try:
        conf = yaml.load(config)
    except Exception as e:
        lg.error("Can't parse config file %s: %s" % (self.conf['config'], e))
        raise
    finally:
        fp.close()

    # Store parameters but don't overwrite
    # those submitted by command line
    for key, value in conf.iteritems():
        if self.conf.has_key(key):
            # User has submitted own parameter,
            # use that instead of config one
            lg.debug("Using parameter %s from user, ignoring config file value" % key)
        else:
            self.conf[key] = value
IOError
dataset/ETHPy150Open gooddata/smoker/smoker/server/daemon.py/Smokerd._load_config
8,476
def run(self):
    """
    Run daemon

    * change effective uid/gid
    * start thread for each check
    * start webserver
    """
    lg.info("Starting daemon")

    # Change effective UID/GID
    if self.conf.has_key('uid') and self.conf.has_key('gid'):
        if os.geteuid != self.conf['uid'] and os.getegid != self.conf['gid']:
            try:
                os.setegid(self.conf['gid'])
                os.seteuid(self.conf['uid'])
            except TypeError as e:
                lg.error("Config parameters uid/gid have to be integers: %s" % e)
            except OSError as e:
                lg.error("Can't switch effective UID/GID to %s/%s: %s" %
                         (self.conf['uid'], self.conf['gid'], e))
                lg.exception(e)
                self._shutdown(exitcode=1)
    else:
        lg.info("Not changing effective UID/GID, keeping %s/%s" % (os.geteuid(), os.getegid()))

    if not isinstance(self.conf['bind_port'], int):
        lg.error("Config parameter bind_port has to be integer")

    # Initialize plugin manager
    config = {}
    for key in ['plugins', 'templates', 'actions']:
        try:
            config[key] = self.conf[key]
        except __HOLE__ as e:
            lg.warn("Config section not found: %s" % e)

    # Check we have some plugins configured
    if not config['plugins']:
        lg.error('No configured plugins')
        self._shutdown(exitcode=1)

    if 'nr_concurrent_plugins' in self.conf:
        config['semaphore_count'] = self.conf['nr_concurrent_plugins']

    try:
        self.pluginmgr = PluginManager(**config)
    except Exception as e:
        lg.error("Can't initialize PluginManager")
        lg.exception(e)
        self._shutdown(exitcode=1)

    lg.info("Starting webserver on %(bind_host)s:%(bind_port)s" % self.conf)
    try:
        self.server = RestServer(self.conf['bind_host'], self.conf['bind_port'], self)
        self.server.start()
    except Exception as e:
        lg.error("Can't start HTTP server: %s" % e)
        lg.exception(e)
        self._shutdown(exitcode=1)

    # Catch SIGINT and SIGTERM if supported
    if hasattr(signal, 'SIGINT'):
        signal.signal(signal.SIGINT, self._shutdown)
    if hasattr(signal, 'SIGTERM'):
        signal.signal(signal.SIGTERM, self._shutdown)
    if hasattr(signal, 'SIGHUP'):
        signal.signal(signal.SIGHUP, self._reopen_logfiles)

    self._watchdog()
KeyError
dataset/ETHPy150Open gooddata/smoker/smoker/server/daemon.py/Smokerd.run
8,477
def validate_start_time(value):
    try:
        datetime.strptime(value, '%d.%m.%Y %H:%M')
    except __HOLE__:
        raise DjangoValidationError(_('Invalid input.'))
ValueError
dataset/ETHPy150Open OpenSlides/OpenSlides/openslides/agenda/signals.py/validate_start_time
8,478
def __getattr__(self, key):
    try:
        return self[key]
    except __HOLE__, k:
        return None
KeyError
dataset/ETHPy150Open limodou/uliweb/uliweb/utils/storage.py/Storage.__getattr__
8,479
def __delattr__(self, key):
    try:
        del self[key]
    except __HOLE__, k:
        raise AttributeError, k
KeyError
dataset/ETHPy150Open limodou/uliweb/uliweb/utils/storage.py/Storage.__delattr__