beamds.beam.data package#

Submodules#

beamds.beam.data.beam_data module#

class beamds.beam.data.beam_data.BeamData(*args, data=None, path=None, name=None, all_paths=None, index=None, label=None, columns=None, lazy=True, device=None, target_device=None, schema=None, override=False, compress=None, split_by='keys', chunksize=1000000000, chunklen=None, n_chunks=None, key_map=None, partition=None, archive_size=1000000, preferred_orientation='index', read_kwargs=None, write_kwargs=None, quick_getitem=False, orientation=None, glob_filter=None, info=None, synced=False, write_metadata=True, read_metadata=True, metadata_path_prefix=None, key_fold_map=None, chunksize_policy='round', **kwargs)[source]#

Bases: BeamName
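
A minimal construction sketch based only on the signature above. The dict-of-arrays input and the assumed semantics of the keywords (deferred loading for lazy, a human-readable identifier for name) are illustrative assumptions, not documented behavior:

    import numpy as np
    from beamds.beam.data.beam_data import BeamData

    # Hypothetical example: wrap a dict of arrays in a BeamData container.
    # The keyword names come from the signature above; the values are placeholders.
    bd = BeamData(
        data={'train': np.random.rand(100, 4), 'test': np.random.rand(20, 4)},
        name='my_dataset',
        lazy=True,
    )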

abs_all_paths(all_paths=None, root_path=None)[source]#
property all_paths#
apply(func, *args, preferred_orientation='columns', **kwargs)[source]#
as_numpy()[source]#

Convert the data to numpy in place.

as_tensor(device=None, dtype=None, return_vector=False)[source]#

Convert the data to tensor in place.
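
A hedged usage sketch of the two in-place conversion methods above; the device string is illustrative, and the assumption that both calls mutate the container rather than returning a copy follows from the "in place" wording:

    import numpy as np
    from beamds.beam.data.beam_data import BeamData

    bd = BeamData(data={'x': np.random.rand(8, 3)})
    bd.as_tensor(device='cpu')   # arrays become tensors on the requested device (assumed)
    bd.as_numpy()                # and back to numpy arrays, still in place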

cache(path=None, all_paths=None, schema=None, update=False, in_place=True, **kwargs)[source]#
cached(*args, **kwargs)[source]#
clear_index()[source]#
clear_label()[source]#
static clear_metadata(path)[source]#
clone(*args, data=None, path=None, all_paths=None, key_map=None, index=None, label=None, columns=None, schema=None, orientation=None, info=None, constructor=None, key_fold_map=None, **kwargs)[source]#
static collate(*args, batch=None, split_by=None, **kwargs)[source]#
columns_chunk_file_extension = '.columns_chunk'#
property columns_map#
columns_partition_directory_name = '.columns_part'#
static concat(bds, dim=0)[source]#
concatenate_values(data=None, orientation=None, objects_type=None)[source]#
property conf#
static containerize_keys_and_values(keys, values)[source]#
static data_batch(data, index=None, label=None, orientation=None, info=None, flatten_index=False, flatten_label=False)[source]#
property data_slicer[source]#
property data_type#
property data_types#
default_data_file_name = 'data_container'#
property device#
divide_chunks(**kwargs)[source]#
property dtypes#
static exists(paths)[source]#
property flatten_data#
property flatten_items#
classmethod from_indexed_pandas(data, *args, **kwargs)[source]#
classmethod from_path(path, *args, **kwargs)[source]#
get_default_params(*args, **kwargs)[source]#

Get default parameters from the class.
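
A one-line hedged illustration; calling the method with no arguments to retrieve the class-level defaults is an assumption drawn from the docstring, not from documented usage:

    from beamds.beam.data.beam_data import BeamData

    bd = BeamData(data={'x': [1, 2, 3]})        # placeholder data
    defaults = bd.get_default_params()          # assumed: returns the constructor defaults (e.g. chunksize, lazy)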

get_index_by_key_fold_map(keys, keys_type=None)[source]#
get_info_groups()[source]#
static get_n_chunks(data, n_chunks=None, chunklen=None, chunksize=None, size=None, chunksize_policy='round')[source]#
static get_schema_from_subset(schema, key, schema_type=None)[source]#
static get_schema_from_tupled_key(schema, key, schema_type=None)[source]#
property has_index#
property has_label#
property hash#
head(n=20)[source]#
hierarchical_keys(recursive=False)[source]#
property index#
index_chunk_file_extension = '.index_chunk'#
index_partition_directory_name = '.index_part'#
property index_slicer[source]#
property index_type[source]#
property info#
inverse_columns_map(columns)[source]#
inverse_map(ind)[source]#
items(level=1)[source]#
property key_map#
keys(level=1)[source]#
property label#
property label_slicer[source]#
property label_type[source]#
classmethod load_state_dict(state_dict)[source]#
metadata_files = {'all_paths': '.all_paths.pkl', 'aux': '.aux.pkl', 'conf': '.conf.pkl', 'index': '.index', 'info': '.info.fea', 'label': '.label', 'schema': '.schema.pkl'}#
metadata_path_exists(key)[source]#
property metadata_paths#
static normalize_key(key)[source]#
property objects_type#
property orientation#
property parent#
property path#
static read(paths, schema=None, strict=False, _check_existence=False, **kwargs)[source]#
recursive_filter(x, info)[source]#
static recursive_map_path(root_path, relative_path=None, glob_filter=None)[source]#
static recursive_root_finder(all_paths, head=None)[source]#
reset_index()[source]#
reset_metadata(*args, avoid_reset=None)[source]#
property root_path#
sample(n, replace=True)[source]#
property schema#
property schema_type#
static set_first_key(key, value)[source]#
set_property(p)[source]#
property shape#
classmethod simple(*args, preferred_orientation='index', **kwargs)[source]#
property simplified#
single_file_case(root_path, all_paths, metadata_paths)[source]#
property size#
slice_columns(columns)[source]#
slice_data(index)[source]#
slice_index(index, index_type=None)[source]#
slice_keys(keys)[source]#
static slice_scalar_or_list(data, keys, data_type=None, keys_type=None, replace_missing=False)[source]#
property stack#
property stacked_index#
property stacked_labels#
property stacked_values#
state_dict()[source]#
store(path=None, data=None, compress=None, chunksize=None, chunklen=None, n_chunks=None, partition=None, split_by=None, archive_size=None, override=None, split=True, chunksize_policy=None, **kwargs)[source]#
to(device)[source]#
to_path(path)[source]#
property total_size#
update_all_paths_file()[source]#
static update_hierarchy(root_path, all_paths)[source]#
property values#
static write_file(data, path, override=True, schema=None, **kwargs)[source]#
static write_object(data, path, override=True, size=None, archive=False, compress=None, chunksize=1000000000, chunklen=None, n_chunks=None, partition=None, file_type=None, schema=None, textual_serialization=False, split_by=None, split=True, priority=None, blacklist_priority=None, chunksize_policy='round', **kwargs)[source]#
static write_tree(data, path, sizes=None, split_by='keys', archive_size=1000000, chunksize=1000000000, override=True, chunklen=None, n_chunks=None, partition=None, file_type=None, root=False, schema=None, split=False, textual_serialization=False, blacklist_priority=None, chunksize_policy='round', **kwargs)[source]#
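
A hedged round-trip sketch tying together store(), from_path() and cache() from the listing above; the path is a placeholder, and the assumption that from_path() reconstructs what store() wrote follows from the method names rather than from documented behavior:

    import numpy as np
    from beamds.beam.data.beam_data import BeamData

    bd = BeamData(data={'x': np.random.rand(100, 4)})
    bd.store(path='/tmp/beam_demo')             # placeholder path; persists data and metadata (assumed)

    bd2 = BeamData.from_path('/tmp/beam_demo')  # lazy handle onto the stored tree (assumed)
    bd2.cache()                                 # materialize values into memory, per cache() above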

beamds.beam.data.beam_schema module#

class beamds.beam.data.beam_schema.BeamSchema(read_schema=None, write_schema=None, **kwargs)[source]#

Bases: object

property read_schema#
property write_schema#
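
A minimal sketch of constructing a BeamSchema, assuming read_schema and write_schema accept dictionaries of reader/writer options; their exact expected structure is not documented here, so the values are placeholders:

    from beamds.beam.data.beam_schema import BeamSchema

    schema = BeamSchema(
        read_schema={'nrows': None},            # placeholder read-time options (assumed structure)
        write_schema={'compression': 'gzip'},   # placeholder write-time options (assumed structure)
    )
    # A schema object can then be passed via the 'schema' argument of the BeamData constructor above.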

beamds.beam.data.elements module#

Module contents#