desc stringlengths 3 26.7k | decl stringlengths 11 7.89k | bodies stringlengths 8 553k |
|---|---|---|
'Build a string of named and positional arguments which are passed to the
script.
:param named_args: Dictionary with named arguments.
:type named_args: ``dict``.
:param positional_args: List with positional arguments.
:type positional_args: ``dict``.
:rtype: ``str``'
| def _get_script_arguments(self, named_args=None, positional_args=None):
| command_parts = []
if (named_args is not None):
for (arg, value) in six.iteritems(named_args):
if ((value is None) or (isinstance(value, (str, unicode)) and (len(value) < 1))):
LOG.debug('Ignoring arg %s as its value is %s.', arg, value)
c... |
'Given a key ``name`` and ``user``, this method returns a new name (string ref)
to address the key value pair in the context of that user.
:param user: User to whom key belongs.
:type name: ``str``
:param name: Original name of the key.
:type name: ``str``
:rtype: ``str``'
| @staticmethod
def to_string_reference(user, name):
| if ((not user) or (not name)):
raise ValueError('Both "user" and "name" must be valid to generate ref.')
return UserKeyReference(user=user, name=name).ref
|
'Given a user key ``reference``, this method returns the user and actual name of the key.
:param ref: Reference to user key.
:type ref: ``str``
:rtype: ``tuple`` of ``str`` and ``str``'
| @staticmethod
def from_string_reference(ref):
| user = UserKeyReference.get_user(ref)
name = UserKeyReference.get_name(ref)
return (user, name)
|
'Given a user key ``reference``, this method returns the user to whom the key belongs.
:param ref: Reference to user key.
:type ref: ``str``
:rtype: ``str``'
| @staticmethod
def get_user(ref):
| try:
return ref.split(USER_SEPARATOR, 1)[0]
except (IndexError, AttributeError):
raise InvalidUserKeyReferenceError(ref=ref)
|
'Given a user key ``reference``, this method returns the name of the key.
:param ref: Reference to user key.
:type ref: ``str``
:rtype: ``str``'
| @staticmethod
def get_name(ref):
| try:
return ref.split(USER_SEPARATOR, 1)[1]
except (IndexError, AttributeError):
raise InvalidUserKeyReferenceError(ref=ref)
|
'Convert a `datetime` object to number of microseconds since epoch representation
(which will be stored in MongoDB). This is the reverse function of
`_convert_from_db`.'
| def _convert_from_datetime(self, val):
| result = self._datetime_to_microseconds_since_epoch(value=val)
return result
|
'Convert a number representation to a `datetime` object (the object you
will manipulate). This is the reverse function of
`_convert_from_datetime`.
:param data: Number of microseconds since the epoch.
:type data: ``int``'
| def _microseconds_since_epoch_to_datetime(self, data):
| result = datetime.datetime.utcfromtimestamp((data // SECOND_TO_MICROSECONDS))
microseconds_reminder = (data % SECOND_TO_MICROSECONDS)
result = result.replace(microsecond=microseconds_reminder)
result = date_utils.add_utc_tz(result)
return result
|
'Convert datetime in UTC to number of microseconds from epoch.
Note: datetime which is passed to the function needs to be in UTC timezone (e.g. as returned
by ``datetime.datetime.utcnow``).
:rtype: ``int``'
| def _datetime_to_microseconds_since_epoch(self, value):
| if ((not value.tzinfo) or (value.tzinfo.utcoffset(value) != datetime.timedelta(0))):
raise ValueError('Value passed to this function needs to be in UTC timezone')
seconds = calendar.timegm(value.timetuple())
microseconds_reminder = value.time().microsecond
result = ... |
'Retrieve a list of runners in the provided directories.
:return: Dictionary where the key is runner name and the value is full path to the runner
directory.
:rtype: ``dict``'
| def get_runners(self, base_dirs):
| assert isinstance(base_dirs, list)
result = {}
for base_dir in base_dirs:
if (not os.path.isdir(base_dir)):
raise ValueError(('Directory "%s" doesn\'t exist' % base_dir))
runners_in_dir = self._get_runners_from_dir(base_dir=base_dir)
result.update(runners_in_dir)... |
'Retrieve a list of packs in the provided directories.
:return: Dictionary where the key is pack name and the value is full path to the pack
directory.
:rtype: ``dict``'
| def get_packs(self, base_dirs):
| assert isinstance(base_dirs, list)
result = {}
for base_dir in base_dirs:
if (not os.path.isdir(base_dir)):
raise ValueError(('Directory "%s" doesn\'t exist' % base_dir))
packs_in_dir = self._get_packs_from_dir(base_dir=base_dir)
result.update(packs_in_dir)
r... |
'Retrieve content from the provided directories.
Provided directories are searched from left to right. If a pack with the same name exists
in multiple directories, first pack which is found wins.
:param base_dirs: Directories to look into.
:type base_dirs: ``list``
:param content_type: Content type to look for (sensors... | def get_content(self, base_dirs, content_type):
| assert isinstance(base_dirs, list)
if (content_type not in self.ALLOWED_CONTENT_TYPES):
raise ValueError(('Unsupported content_type: %s' % content_type))
content = {}
pack_to_dir_map = {}
for base_dir in base_dirs:
if (not os.path.isdir(base_dir)):
raise ValueError(... |
'Retrieve content from the provided pack directory.
:param pack_dir: Path to the pack directory.
:type pack_dir: ``str``
:param content_type: Content type to look for (sensors, actions, rules).
:type content_type: ``str``
:rtype: ``str``'
| def get_content_from_pack(self, pack_dir, content_type):
| if (content_type not in self.ALLOWED_CONTENT_TYPES):
raise ValueError(('Unsupported content_type: %s' % content_type))
if (not os.path.isdir(pack_dir)):
raise ValueError(('Directory "%s" doesn\'t exist' % pack_dir))
content = self._get_content_from_pack_dir(pack_dir=pack_dir, ... |
'Retrieve a list of runners in the provided directories.
:return: Dictionary where the key is runner name and the value is full path to the runner
directory.
:rtype: ``dict``'
| def get_runners(self, base_dirs):
| assert isinstance(base_dirs, list)
result = {}
for base_dir in base_dirs:
if (not os.path.isdir(base_dir)):
raise ValueError(('Directory "%s" doesn\'t exist' % base_dir))
runners_in_dir = self._get_runners_from_dir(base_dir=base_dir)
result.update(runners_in_dir)... |
'Loads content from file_path if file_path\'s extension
is one of allowed ones (See ALLOWED_EXTS).
Throws UnsupportedMetaException on disallowed filetypes.
Throws ValueError on malformed meta.
:param file_path: Absolute path to the file to load content from.
:type file_path: ``str``
:param expected_type: Expected type ... | def load(self, file_path, expected_type=None):
| (file_name, file_ext) = os.path.splitext(file_path)
if (file_ext not in ALLOWED_EXTS):
raise Exception(('Unsupported meta type %s, file %s. Allowed: %s' % (file_ext, file_path, ALLOWED_EXTS)))
result = self._load(PARSER_FUNCS[file_ext], file_path)
if (expected_type and (not ... |
':return: Dict with the following keys: roles, role_assiginments
:rtype: ``dict``'
| def load(self):
| result = {}
result['roles'] = self.load_role_definitions()
result['role_assignments'] = self.load_user_role_assignments()
result['group_to_role_maps'] = self.load_group_to_role_maps()
return result
|
'Load all the role definitions.
:rtype: ``dict``'
| def load_role_definitions(self):
| LOG.info(('Loading role definitions from "%s"' % self._role_definitions_path))
file_paths = self._get_role_definitions_file_paths()
result = {}
for file_path in file_paths:
LOG.debug(('Loading role definition from: %s' % file_path))
role_definition_api = self.load... |
'Load all the user role assignments.
:rtype: ``dict``'
| def load_user_role_assignments(self):
| LOG.info(('Loading user role assignments from "%s"' % self._role_assignments_path))
file_paths = self._get_role_assiginments_file_paths()
result = {}
for file_path in file_paths:
LOG.debug(('Loading user role assignments from: %s' % file_path))
role_assignme... |
'Load all the remote group to local role mappings.
:rtype: ``dict``'
| def load_group_to_role_maps(self):
| LOG.info(('Loading group to role map definitions from "%s"' % self._role_maps_path))
file_paths = self._get_group_to_role_maps_file_paths()
result = {}
for file_path in file_paths:
LOG.debug(('Loading group to role mapping from: %s' % file_path))
gr... |
'Load role definition from file.
:param file_path: Path to the role definition file.
:type file_path: ``str``
:return: Role definition.
:rtype: :class:`RoleDefinitionFileFormatAPI`'
| def load_role_definition_from_file(self, file_path):
| content = self._meta_loader.load(file_path)
if (not content):
msg = ('Role definition file "%s" is empty and invalid' % file_path)
raise ValueError(msg)
role_definition_api = RoleDefinitionFileFormatAPI(**content)
role_definition_api = role_definition_api.validate()
... |
'Load user role assignments from file.
:param file_path: Path to the user role assignment file.
:type file_path: ``str``
:return: User role assignments.
:rtype: :class:`UserRoleAssignmentFileFormatAPI`'
| def load_user_role_assignments_from_file(self, file_path):
| content = self._meta_loader.load(file_path)
if (not content):
msg = ('Role assignment file "%s" is empty and invalid' % file_path)
raise ValueError(msg)
user_role_assignment_api = UserRoleAssignmentFileFormatAPI(**content)
user_role_assignment_api = user_role_assignm... |
'Retrieve a list of paths for all the role definitions.
Notes: Roles are sorted in an alphabetical order based on the role name.
:rtype: ``list``'
| def _get_role_definitions_file_paths(self):
| glob_str = (self._role_definitions_path + '*.yaml')
file_paths = glob.glob(glob_str)
file_paths = sorted(file_paths, cmp=compare_path_file_name)
return file_paths
|
'Retrieve a list of paths for all the user role assignments.
Notes: Assignments are sorted in an alphabetical order based on the username.
:rtype: ``list``'
| def _get_role_assiginments_file_paths(self):
| glob_str = (self._role_assignments_path + '*.yaml')
file_paths = glob.glob(glob_str)
file_paths = sorted(file_paths, cmp=compare_path_file_name)
return file_paths
|
'Retrieve a list of path for remote group to local role mapping assignment files.
:rtype: ``list``'
| def _get_group_to_role_maps_file_paths(self):
| glob_str = (self._role_maps_path + '*.yaml')
file_paths = glob.glob(glob_str)
file_paths = sorted(file_paths, cmp=compare_path_file_name)
return file_paths
|
'Return valid permissions for the provided resource type.
:rtype: ``list``'
| @classmethod
def get_valid_permissions_for_resource_type(cls, resource_type):
| valid_permissions = RESOURCE_TYPE_TO_PERMISSION_TYPES_MAP[resource_type]
return valid_permissions
|
'Retrieve resource type from the provided permission type.
:rtype: ``str``'
| @classmethod
def get_resource_type(cls, permission_type):
| if (permission_type == PermissionType.PACK_VIEWS_INDEX_HEALTH):
return ResourceType.PACK
elif (permission_type == PermissionType.EXECUTION_VIEWS_FILTERS_LIST):
return ResourceType.EXECUTION
split = permission_type.split('_')
assert (len(split) >= 2)
return '_'.join(split[:(-1)])
|
'Retrieve permission name from the provided permission type.
:rtype: ``str``'
| @classmethod
def get_permission_name(cls, permission_type):
| split = permission_type.split('_')
assert (len(split) >= 2)
if (permission_type == PermissionType.PACK_VIEWS_INDEX_HEALTH):
split = permission_type.split('_', 1)
return split[1]
return split[(-1)]
|
'Retrieve a description for the provided permission_type.
:rtype: ``str``'
| @classmethod
def get_permission_description(cls, permission_type):
| description = PERMISION_TYPE_TO_DESCRIPTION_MAP[permission_type]
return description
|
'Retrieve permission type enum value for the provided resource type and permission name.
:rtype: ``str``'
| @classmethod
def get_permission_type(cls, resource_type, permission_name):
| if (resource_type == ResourceType.SENSOR):
resource_type = 'sensor'
permission_enum = ('%s_%s' % (resource_type.upper(), permission_name.upper()))
result = getattr(cls, permission_enum, None)
if (not result):
raise ValueError(('Unsupported permission type for type "%s" ... |
'Synchronize all the role definitions, user role assignments and remote group to local roles
maps.'
| def sync(self, role_definition_apis, role_assignment_apis, group_to_role_map_apis):
| result = {}
result['roles'] = self.sync_roles(role_definition_apis)
result['role_assignments'] = self.sync_users_role_assignments(role_assignment_apis)
result['group_to_role_maps'] = self.sync_group_to_role_maps(group_to_role_map_apis)
return result
|
'Synchronize all the role definitions in the database.
:param role_dbs: RoleDB objects for the roles which are currently in the database.
:type role_dbs: ``list`` of :class:`RoleDB`
:param role_definition_apis: RoleDefinition API objects for the definitions loaded from
the files.
:type role_definition_apis: ``list`` of... | def sync_roles(self, role_definition_apis):
| LOG.info('Synchronizing roles...')
role_dbs = rbac_services.get_all_roles(exclude_system=True)
role_db_names = [role_db.name for role_db in role_dbs]
role_db_names = set(role_db_names)
role_api_names = [role_definition_api.name for role_definition_api in role_definition_apis]
role_api_names =... |
'Synchronize role assignments for all the users in the database.
:param role_assignment_apis: Role assignments API objects for the assignments loaded
from the files.
:type role_assignment_apis: ``list`` of :class:`UserRoleAssignmentFileFormatAPI`
:return: Dictionary with created and removed role assignments for each us... | def sync_users_role_assignments(self, role_assignment_apis):
| assert isinstance(role_assignment_apis, (list, tuple))
LOG.info('Synchronizing users role assignments...')
role_assignment_dbs = rbac_services.get_all_role_assignments(include_remote=False)
user_dbs = User.get_all()
username_to_user_db_map = dict([(user_db.name, user_db) for user_db in user... |
'Synchronize role assignments for a particular user.
:param user_db: User to synchronize the assignments for.
:type user_db: :class:`UserDB`
:param role_assignment_dbs: Existing user role assignments.
:type role_assignment_dbs: ``list`` of :class:`UserRoleAssignmentDB`
:param role_assignment_api: Role assignment API fo... | def _sync_user_role_assignments(self, user_db, role_assignment_dbs, role_assignment_api):
| db_role_names = [role_assignment_db.role for role_assignment_db in role_assignment_dbs]
db_role_names = set(db_role_names)
api_role_names = (role_assignment_api.roles if role_assignment_api else [])
api_role_names = set(api_role_names)
new_role_names = api_role_names.difference(db_role_names)
up... |
':param user_db: User to sync the assignments for.
:type user: :class:`UserDB`
:param groups: A list of remote groups user is a member of.
:type groups: ``list`` of ``str``
:return: A list of mappings which have been created.
:rtype: ``list`` of :class:`UserRoleAssignmentDB`'
| def sync(self, user_db, groups):
| groups = list(set(groups))
extra = {'user_db': user_db, 'groups': groups}
LOG.info(('Synchronizing remote role assignments for user "%s"' % str(user_db)), extra=extra)
all_mapping_dbs = GroupToRoleMapping.query(group__in=groups)
enabled_mapping_dbs = [mapping_db for mapping_db in a... |
'Method for checking user permissions which are not tied to a particular resource.'
| def user_has_permission(self, user_db, permission_type):
| raise NotImplementedError()
|
'Method for checking user permissions on a resource which is to be created (e.g.
create operation).'
| def user_has_resource_api_permission(self, user_db, resource_api, permission_type):
| raise NotImplementedError()
|
'Method for checking user permissions on an existing resource (e.g. get one, edit, delete
operations).'
| def user_has_resource_db_permission(self, user_db, resource_db, permission_type):
| raise NotImplementedError()
|
'Common method for checking if a user has specific "list" resource permission (e.g.
rules_list, action_list, etc.).'
| def _user_has_list_permission(self, user_db, permission_type):
| assert (PermissionType.get_permission_name(permission_type) == 'list')
return self._user_has_global_permission(user_db=user_db, permission_type=permission_type)
|
'Custom method for checking if user has a particular global permission which doesn\'t apply
to a specific resource but it\'s system-wide aka global permission.'
| def _user_has_global_permission(self, user_db, permission_type):
| log_context = {'user_db': user_db, 'permission_type': permission_type, 'resolver': self.__class__.__name__}
self._log('Checking user permissions', extra=log_context)
has_system_role_permission = self._user_has_system_role_permission(user_db=user_db, permission_type=permission_type)
if has_system_r... |
'Check the user system roles and return True if user has the required permission.
:rtype: ``bool``'
| def _user_has_system_role_permission(self, user_db, permission_type):
| permission_name = PermissionType.get_permission_name(permission_type)
user_role_dbs = get_roles_for_user(user_db=user_db)
user_role_names = [role_db.name for role_db in user_role_dbs]
if (SystemRole.SYSTEM_ADMIN in user_role_names):
return True
elif (SystemRole.ADMIN in user_role_names):
... |
':rtype: ``bool``'
| def _matches_permission_grant(self, resource_db, permission_grant, permission_type, all_permission_type):
| if (permission_type in permission_grant.permission_types):
return True
elif (all_permission_type in permission_grant.permission_types):
return True
return False
|
'Retrieve "ALL" permission type for the provided resource.'
| def _get_all_permission_type_for_resource(self, resource_db):
| resource_type = resource_db.get_resource_type()
permission_type = PermissionType.get_permission_type(resource_type=resource_type, permission_name='all')
return permission_type
|
'Custom logger method which prefix message with the class and caller method name.'
| def _log(self, message, extra, level=stdlib_logging.DEBUG, **kwargs):
| class_name = self.__class__.__name__
method_name = sys._getframe().f_back.f_code.co_name
message_prefix = ('%s.%s: ' % (class_name, method_name))
message = (message_prefix + message)
LOG.log(level, message, extra=extra, **kwargs)
|
'Check if the user has access to the provided trigger.
This method is to be used during rule create and update where we check if the user has the
necessary trigger permissions.
Note: Right now we only support webhook triggers.
:param trigger: "trigger" attribute of the RuleAPI object.
:type trigger: ``dict``'
| def user_has_trigger_permission(self, user_db, trigger):
| log_context = {'user_db': user_db, 'trigger': trigger, 'resolver': self.__class__.__name__}
trigger_type = trigger['type']
trigger_parameters = trigger.get('parameters', {})
if (trigger_type != WEBHOOK_TRIGGER_TYPE):
self._log('Not a webhook trigger type, ignoring trigger pe... |
'Check if the user has "execute" permission on the provided action.'
| def user_has_action_permission(self, user_db, action_ref):
| pass
|
'The method is invoked on every request and shows the lifecycle of the request received from
the middleware.
Although some middleware may use parts of the API spec, it is safe to assume that if you\'re
looking for the particular spec property handler, it\'s most likely a part of this method.
At the time of writing, th... | def __call__(self, req):
| LOG.debug('Recieved call with WebOb: %s', req)
(endpoint, path_vars) = self.match(req)
LOG.debug('Parsed endpoint: %s', endpoint)
LOG.debug('Parsed path_vars: %s', path_vars)
context = copy.copy(getattr(self, 'mock_context', {}))
if ('security' in endpoint):
secur... |
'Converts WSGI request to webob.Request and initiates the response returned by controller.'
| def as_wsgi(self, environ, start_response):
| req = Request(environ)
resp = self(req)
return resp(environ, start_response)
|
'Return a dispatcher class which is used for dispatching triggers.'
| @classmethod
def _get_dispatcher(cls):
| from st2common.transport.reactor import TriggerDispatcher
if (not cls.dispatcher):
cls.dispatcher = TriggerDispatcher(LOG)
return cls.dispatcher
|
'Use this method when -
* upsert=False is desired
* special operators like push, push_all are to be used.'
| @classmethod
def update(cls, model_object, publish=True, dispatch_trigger=True, **kwargs):
| cls._get_impl().update(model_object, **kwargs)
model_object = cls.get_by_id(model_object.id)
if publish:
try:
cls.publish_update(model_object)
except:
LOG.exception('Publish failed.')
if dispatch_trigger:
try:
cls.dispatch_update_trigger(mod... |
'Dispatch a resource-specific trigger which indicates a new resource has been created.'
| @classmethod
def dispatch_create_trigger(cls, model_object):
| return cls._dispatch_operation_trigger(operation='create', model_object=model_object)
|
'Dispatch a resource-specific trigger which indicates an existing resource has been updated.'
| @classmethod
def dispatch_update_trigger(cls, model_object):
| return cls._dispatch_operation_trigger(operation='update', model_object=model_object)
|
'Dispatch a resource-specific trigger which indicates an existing resource has been
deleted.'
| @classmethod
def dispatch_delete_trigger(cls, model_object):
| return cls._dispatch_operation_trigger(operation='delete', model_object=model_object)
|
'Publish the object status to the message queue.
Publish the instance of the model as payload with the status
as routing key to the message queue via the StatePublisher.
:param model_object: An instance of the model.
:type model_object: ``object``'
| @classmethod
def publish_status(cls, model_object):
| publisher = cls._get_publisher()
if publisher:
publisher.publish_state(model_object, getattr(model_object, 'status', None))
|
'Note: We override add_or_update because we also want to publish high level "value_change"
event for this resource.'
| @classmethod
def add_or_update(cls, model_object, publish=True, dispatch_trigger=True):
| if model_object.id:
existing_model_object = cls.get_by_id(value=model_object.id)
else:
existing_model_object = None
model_object = super(KeyValuePair, cls).add_or_update(model_object=model_object, publish=publish, dispatch_trigger=dispatch_trigger)
if (existing_model_object and (existing... |
'Retrieve KeyValuePair objects for the provided key names.'
| @classmethod
def get_by_names(cls, names):
| return cls.query(name__in=names)
|
'Get a key value store given a scope and name.
:param scope: Scope which the key belongs to.
:type scope: ``str``
:param name: Name of the key.
:type key: ``str``
:rtype: :class:`KeyValuePairDB` or ``None``'
| @classmethod
def get_by_scope_and_name(cls, scope, name):
| query_result = cls.impl.query(scope=scope, name=name)
return (query_result.first() if query_result else None)
|
'Discover all the packs in the provided directory and register policies from all of the
discovered packs.
:return: Number of policies registered.
:rtype: ``int``'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='policies')
for (pack, policies_dir) in six.iteritems(content):
if (not policies_dir):
LOG.debug('Pack %s does not contain policies.'... |
'Register all the policies from the provided pack.
:return: Number of policies registered.
:rtype: ``int``'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack) = os.path.split(pack_dir)
policies_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir, content_type='policies')
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if (not policies_... |
':param use_pack_cache: True to cache which packs have been registered in memory and making
sure packs are only registered once.
:type use_pack_cache: ``bool``
:param fail_on_failure: Throw an exception if resource registration fails.
:type fail_on_failure: ``bool``'
| def __init__(self, use_pack_cache=True, fail_on_failure=False):
| self._use_pack_cache = use_pack_cache
self._fail_on_failure = fail_on_failure
self._meta_loader = MetaLoader()
self._pack_loader = ContentPackLoader()
self._runner_loader = RunnersLoader()
|
'Return a list of registered packs.
:rype: ``list``'
| def get_registered_packs(self):
| return REGISTERED_PACKS_CACHE.keys()
|
'Register packs in all the provided directories.'
| def register_packs(self, base_dirs):
| packs = self._pack_loader.get_packs(base_dirs=base_dirs)
registered_count = 0
for (pack_name, pack_path) in six.iteritems(packs):
self.register_pack(pack_name=pack_name, pack_dir=pack_path)
registered_count += 1
return registered_count
|
'Register pack in the provided directory.'
| def register_pack(self, pack_name, pack_dir):
| if (self._use_pack_cache and (pack_name in REGISTERED_PACKS_CACHE)):
return
LOG.debug(('Registering pack: %s' % pack_name))
REGISTERED_PACKS_CACHE[pack_name] = True
try:
(pack_db, _) = self._register_pack(pack_name=pack_name, pack_dir=pack_dir)
except Exception as e:
if... |
'Register a pack and corresponding pack config schema (create a DB object in the system).
Note: Pack registration now happens when registering the content and not when installing
a pack using packs.install. Eventually this will be moved to the pack management API.'
| def _register_pack(self, pack_name, pack_dir):
| pack_db = self._register_pack_db(pack_name=pack_name, pack_dir=pack_dir)
config_path = os.path.join(pack_dir, 'config.yaml')
if os.path.isfile(config_path):
LOG.warning(('Pack "%s" contains a deprecated config.yaml file (%s). Support for "config.yaml" files has ... |
'Discover all the packs in the provided directory and register sensors from all of the
discovered packs.
:return: Number of sensors registered.
:rtype: ``int``'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='sensors')
for (pack, sensors_dir) in six.iteritems(content):
if (not sensors_dir):
LOG.debug('Pack %s does not contain sensors.', pa... |
'Register all the sensors from the provided pack.
:return: Number of sensors registered.
:rtype: ``int``'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack) = os.path.split(pack_dir)
sensors_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir, content_type='sensors')
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if (not sensors_dir... |
'Discover all the packs in the provided directory and register actions from all of the
discovered packs.
:return: Number of actions registered.
:rtype: ``int``'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='actions')
for (pack, actions_dir) in six.iteritems(content):
if (not actions_dir):
LOG.debug('Pack %s does not contain actions.', pa... |
'Register all the actions from the provided pack.
:return: Number of actions registered.
:rtype: ``int``'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack) = os.path.split(pack_dir)
actions_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir, content_type='actions')
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if (not actions_dir... |
'Discover all the packs in the provided directory and register triggers from all of the
discovered packs.
:return: Number of triggers registered.
:rtype: ``int``'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='triggers')
for (pack, triggers_dir) in six.iteritems(content):
if (not triggers_dir):
LOG.debug('Pack %s does not contain triggers.'... |
'Register all the triggers from the provided pack.
:return: Number of triggers registered.
:rtype: ``int``'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack) = os.path.split(pack_dir)
triggers_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir, content_type='triggers')
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if (not triggers_... |
'Discover all the packs in the provided directory and register aliases from all of the
discovered packs.
:return: Number of aliases registered.
:rtype: ``int``'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='aliases')
for (pack, aliases_dir) in six.iteritems(content):
if (not aliases_dir):
LOG.debug('Pack %s does not contain aliases.', pa... |
'Register all the aliases from the provided pack.
:return: Number of aliases registered.
:rtype: ``int``'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack) = os.path.split(pack_dir)
aliases_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir, content_type='aliases')
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if (not aliases_dir... |
'Retrieve ActionAliasDB object.'
| def _get_action_alias_db(self, pack, action_alias):
| content = self._meta_loader.load(action_alias)
pack_field = content.get('pack', None)
if (not pack_field):
content['pack'] = pack
pack_field = pack
if (pack_field != pack):
raise Exception(('Model is in pack "%s" but field "pack" is different: %s' % ... |
'Register configs for all the available packs.'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
packs = self._pack_loader.get_packs(base_dirs=base_dirs)
pack_names = packs.keys()
for pack_name in pack_names:
config_path = self._get_config_path_for_pack(pack_name=pack_name)
if (not os.path.isfile(config_path)):
... |
'Register config for a provided pack.'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack_name) = os.path.split(pack_dir)
self.register_pack(pack_name=pack_name, pack_dir=pack_dir)
config_path = self._get_config_path_for_pack(pack_name=pack_name)
if (not os.path.isfile(config_path)):
return 0
self._... |
':return: Number of rules registered.
:rtype: ``int``'
| def register_from_packs(self, base_dirs):
| self.register_packs(base_dirs=base_dirs)
registered_count = 0
content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='rules')
for (pack, rules_dir) in six.iteritems(content):
if (not rules_dir):
LOG.debug('Pack %s does not contain rules.', pack)
... |
'Register all the rules from the provided pack.
:return: Number of rules registered.
:rtype: ``int``'
| def register_from_pack(self, pack_dir):
| pack_dir = (pack_dir[:(-1)] if pack_dir.endswith('/') else pack_dir)
(_, pack) = os.path.split(pack_dir)
rules_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir, content_type='rules')
self.register_pack(pack_name=pack, pack_dir=pack_dir)
registered_count = 0
if (not rules_dir):
... |
':param id: Runner id.
:type id: ``str``'
| def __init__(self, runner_id):
| self.runner_id = runner_id
self.runner_type_db = None
self.container_service = None
self.runner_parameters = None
self.action = None
self.action_name = None
self.liveaction = None
self.liveaction_id = None
self.execution = None
self.execution_id = None
self.entry_point = None... |
'Retrieve pack name for the action which is being currently executed.
:rtype: ``str``'
| def get_pack_name(self):
| if self.action:
return self.action.pack
return DEFAULT_PACK_NAME
|
'Retrieve a name of the user which triggered this action execution.
:rtype: ``str``'
| def get_user(self):
| context = (getattr(self, 'context', {}) or {})
user = context.get('user', cfg.CONF.system_user.user)
return user
|
'Retrieve common ST2_ACTION_ environment variables which will be available to the action.
Note: Environment variables are prefixed with ST2_ACTION_* so they don\'t clash with CLI
environment variables.
:rtype: ``dict``'
| def _get_common_action_env_variables(self):
| result = {}
result['ST2_ACTION_PACK_NAME'] = self.get_pack_name()
result['ST2_ACTION_EXECUTION_ID'] = str(self.execution_id)
result['ST2_ACTION_API_URL'] = get_full_public_api_url()
if self.auth_token:
result['ST2_ACTION_AUTH_TOKEN'] = self.auth_token.token
return result
|
'Transform named arguments to the final form.
:param named_args: Named arguments.
:type named_args: ``dict``
:rtype: ``dict``'
| def _transform_named_args(self, named_args):
| if named_args:
return {(self._kwarg_op + k): v for (k, v) in six.iteritems(named_args)}
return None
|
':param action_parameters: Action parameters.
:type action_parameters: ``dict``
:return: (positional_args, named_args)
:rtype: (``str``, ``dict``)'
| def _get_script_args(self, action_parameters):
| is_script_run_as_cmd = self.runner_parameters.get(RUNNER_COMMAND, None)
pos_args = ''
named_args = {}
if is_script_run_as_cmd:
pos_args = self.runner_parameters.get(RUNNER_COMMAND, '')
named_args = action_parameters
else:
(pos_args, named_args) = action_utils.get_args(action_... |
':param config: Action config.
:type config: ``dict``
:param action_service: ActionService object.
:type action_service: :class:`ActionService~'
| def __init__(self, config=None, action_service=None):
| self.config = (config or {})
self.action_service = action_service
self.logger = get_logger_for_python_runner_action(action_name=self.__class__.__name__)
|
':param command: Samba command string.
:type command: ``str``
:param share: Samba share name.
:type share: ``str``'
| def _get_smbclient_command_args(self, host, username, password, command, share='C$', domain=None):
| args = ['smbclient']
values = {'domain': domain, 'username': username, 'password': password}
if domain:
auth_string = ('%(domain)s\\%(username)s%%%(password)s' % values)
else:
auth_string = ('%(username)s%%%(password)s' % values)
args += ['-U', auth_string]
args += [('//%(host)s/... |
':param pack: Name of the pack this action belongs to.
:type pack: ``str``
:param file_path: Path to the action module.
:type file_path: ``str``
:param parameters: action parameters.
:type parameters: ``dict`` or ``None``
:param user: Name of the user who triggered this action execution.
:type user: ``str``
:param pare... | def __init__(self, pack, file_path, parameters=None, user=None, parent_args=None):
| self._pack = pack
self._file_path = file_path
self._parameters = (parameters or {})
self._user = user
self._parent_args = (parent_args or [])
self._class_name = None
self._logger = logging.getLogger('PythonActionWrapper')
try:
config.parse_args(args=self._parent_args)
except ... |
':rtype: ``dict``'
| def _get_env_vars(self):
| env_vars = {}
if self._env:
env_vars.update(self._env)
st2_env_vars = self._get_common_action_env_variables()
env_vars.update(st2_env_vars)
return env_vars
|
':param stdout: Stdout which was consumed until the timeout occured.
:type stdout: ``str``
:param stdout: Stderr which was consumed until the timeout occured.
:type stderr: ``str``'
| def __init__(self, cmd, timeout, stdout=None, stderr=None):
| self.cmd = cmd
self.timeout = timeout
self.stdout = stdout
self.stderr = stderr
message = ("Command didn't finish in %s seconds" % timeout)
super(SSHCommandTimeoutError, self).__init__(message)
|
'Authentication is always attempted in the following order:
- The key passed in (if key is provided)
- Any key we can find through an SSH agent (only if no password and
key is provided)
- Any "id_rsa" or "id_dsa" key discoverable in ~/.ssh/ (only if no
password and key is provided)
- Plain username/password auth, if a ... | def __init__(self, hostname, port=DEFAULT_SSH_PORT, username=None, password=None, bastion_host=None, key_files=None, key_material=None, timeout=None, passphrase=None):
| self.hostname = hostname
self.port = port
self.username = username
self.password = password
self.key_files = key_files
self.timeout = (timeout or ParamikoSSHClient.CONNECT_TIMEOUT)
self.key_material = key_material
self.bastion_host = bastion_host
self.passphrase = passphrase
self... |
'Connect to the remote node over SSH.
:return: True if the connection has been successfully established,
False otherwise.
:rtype: ``bool``'
| def connect(self):
| if self.bastion_host:
self.logger.debug('Bastion host specified, connecting')
self.bastion_client = self._connect(host=self.bastion_host)
transport = self.bastion_client.get_transport()
real_addr = (self.hostname, self.port)
local_addr = ('', 0)
self.bastion_... |
'Upload a file to the remote node.
:type local_path: ``st``
:param local_path: File path on the local node.
:type remote_path: ``str``
:param remote_path: File path on the remote node.
:type mode: ``int``
:param mode: Permissions mode for the file. E.g. 0744.
:type mirror_local_mode: ``int``
:param mirror_local_mode: S... | def put(self, local_path, remote_path, mode=None, mirror_local_mode=False):
| if ((not local_path) or (not remote_path)):
raise Exception(('Need both local_path and remote_path. local: %s, remote: %s' % local_path), remote_path)
local_path = quote_unix(local_path)
remote_path = quote_unix(remote_path)
extra = {'_local_path': local_path, '_remote_pa... |
'Upload a dir to the remote node.
:type local_path: ``str``
:param local_path: Dir path on the local node.
:type remote_path: ``str``
:param remote_path: Base dir path on the remote node.
:type mode: ``int``
:param mode: Permissions mode for the file. E.g. 0744.
:type mirror_local_mode: ``int``
:param mirror_local_mode... | def put_dir(self, local_path, remote_path, mode=None, mirror_local_mode=False):
| extra = {'_local_path': local_path, '_remote_path': remote_path, '_mode': mode, '_mirror_local_mode': mirror_local_mode}
self.logger.debug('Uploading dir', extra=extra)
if os.path.basename(local_path):
strip = os.path.dirname(local_path)
else:
strip = os.path.dirname(os.path.dirname(l... |
'Validate whether a remote file or directory exists.
:param remote_path: Path to remote file.
:type remote_path: ``str``
:rtype: ``bool``'
| def exists(self, remote_path):
| try:
self.sftp.lstat(remote_path).st_mode
except IOError:
return False
return True
|
'Create a directory on remote box.
:param dir_path: Path to remote directory to be created.
:type dir_path: ``str``
:return: Returns nothing if successful else raises IOError exception.
:rtype: ``None``'
| def mkdir(self, dir_path):
| dir_path = quote_unix(dir_path)
extra = {'_dir_path': dir_path}
self.logger.debug('mkdir', extra=extra)
return self.sftp.mkdir(dir_path)
|
'Delete a file on remote box.
:param path: Path to remote file to be deleted.
:type path: ``str``
:return: True if the file has been successfully deleted, False
otherwise.
:rtype: ``bool``'
| def delete_file(self, path):
| path = quote_unix(path)
extra = {'_path': path}
self.logger.debug('Deleting file', extra=extra)
self.sftp.unlink(path)
return True
|
'Delete a dir on remote box.
:param path: Path to remote dir to be deleted.
:type path: ``str``
:param force: Optional Forcefully remove dir.
:type force: ``bool``
:param timeout: Optional Time to wait for dir to be deleted. Only relevant for force.
:type timeout: ``int``
:return: True if the file has been successfully... | def delete_dir(self, path, force=False, timeout=None):
| path = quote_unix(path)
extra = {'_path': path}
if force:
command = ('rm -rf %s' % path)
extra['_command'] = command
extra['_force'] = force
self.logger.debug('Deleting dir', extra=extra)
return self.run(command, timeout=timeout)
self.logger.debug('Deleti... |
'Note: This function is based on paramiko\'s exec_command()
method.
:param timeout: How long to wait (in seconds) for the command to
finish (optional).
:type timeout: ``float``'
| def run(self, cmd, timeout=None, quote=False):
| if quote:
cmd = quote_unix(cmd)
extra = {'_cmd': cmd}
self.logger.info('Executing command', extra=extra)
bufsize = (-1)
transport = self.client.get_transport()
chan = transport.open_session()
start_time = time.time()
if cmd.startswith('sudo'):
chan.get_pty()
chan.e... |
'Method which lazily establishes SFTP connection if one is not established yet when this
variable is accessed.'
| @property
def sftp(self):
| if (not self.sftp_client):
self.sftp_client = self.client.open_sftp()
return self.sftp_client
|
'Try to consume stdout data from chan if it\'s receive ready.'
| def _consume_stdout(self, chan):
| out = bytearray()
stdout = StringIO()
if chan.recv_ready():
data = chan.recv(self.CHUNK_SIZE)
out += data
while data:
ready = chan.recv_ready()
if (not ready):
break
data = chan.recv(self.CHUNK_SIZE)
out += data
stdo... |
'Try to consume stderr data from chan if it\'s receive ready.'
| def _consume_stderr(self, chan):
| out = bytearray()
stderr = StringIO()
if chan.recv_stderr_ready():
data = chan.recv_stderr(self.CHUNK_SIZE)
out += data
while data:
ready = chan.recv_stderr_ready()
if (not ready):
break
data = chan.recv_stderr(self.CHUNK_SIZE)
... |
'Try to detect private key type and return paramiko.PKey object.'
| def _get_pkey_object(self, key_material, passphrase):
| for cls in [paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey]:
try:
key = cls.from_private_key(StringIO(key_material), password=passphrase)
except paramiko.ssh_exception.SSHException:
pass
else:
return key
contains_header = (REMOTE_RUNNER_PRIVATE_KE... |
'Order of precedence for SSH connection parameters:
1. If user supplies parameters via action parameters, we use them to connect.
2. For parameters not supplied via action parameters, if there is an entry
for host in SSH config file, we use those. Note that this is a merge operation.
3. If user does not supply certain ... | def _connect(self, host, socket=None):
| conninfo = {'hostname': host, 'allow_agent': False, 'look_for_keys': False, 'timeout': self.timeout}
ssh_config_file_info = {}
if cfg.CONF.ssh_runner.use_ssh_config:
ssh_config_file_info = self._get_ssh_config_for_host(host)
self.username = (self.username or ssh_config_file_info.get('user', None... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.