Skip to main content
Version: 1.16.1

PandasGoogleCloudStorageDatasource

Signature

class great_expectations.datasource.fluent.PandasGoogleCloudStorageDatasource(
*,
type: Literal['pandas_gcs'] = 'pandas_gcs',
name: str,
id: Optional[uuid.UUID] = None,
assets: List[great_expectations.datasource.fluent.data_asset.path.file_asset.FileDataAsset] = [],
bucket_or_name: str,
gcs_options: Dict[str,
Union[great_expectations.datasource.fluent.config_str.ConfigStr,
Any]] = {}
)

PandasGoogleCloudStorageDatasource is a PandasDatasource that uses Google Cloud Storage as a data store.

Methods

add_csv_asset

Signature

add_csv_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd0610d4c50> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd0610d4d10> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd0610d4e60> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd0610d5010> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd0610d50d0> = None,
sep: typing.Optional[str] = None,
delimiter: typing.Optional[str] = None,
header: Union[int,
Sequence[int],
None,
Literal['infer']] = 'infer',
names: Union[Sequence[str],
None] = None,
index_col: Union[IndexLabel,
Literal[False],
None] = None,
usecols: typing.Optional[typing.Union[int,
str,
typing.Sequence[int]]] = None,
dtype: typing.Optional[dict] = None,
engine: Union[CSVEngine,
None] = None,
true_values: typing.Optional[typing.List] = None,
false_values: typing.Optional[typing.List] = None,
skipinitialspace: bool = False,
skiprows: typing.Optional[typing.Union[typing.Sequence[int],
int]] = None,
skipfooter: int = 0,
nrows: typing.Optional[int] = None,
na_values: Union[str,
Iterable[str],
None] = None,
keep_default_na: bool = True,
na_filter: bool = True,
skip_blank_lines: bool = True,
parse_dates: Union[bool,
Sequence[str],
None] = None,
date_format: typing.Optional[str] = None,
dayfirst: bool = False,
cache_dates: bool = True,
iterator: bool = False,
chunksize: typing.Optional[int] = None,
compression: CompressionOptions = 'infer',
thousands: typing.Optional[str] = None,
decimal: str = '.',
lineterminator: typing.Optional[str] = None,
quotechar: str = '"',
quoting: int = 0,
doublequote: bool = True,
escapechar: typing.Optional[str] = None,
comment: typing.Optional[str] = None,
encoding: typing.Optional[str] = None,
encoding_errors: typing.Optional[str] = 'strict',
dialect: typing.Optional[str] = None,
on_bad_lines: str = 'error',
low_memory: bool = True,
memory_map: bool = False,
storage_options: Union[StorageOptions,
None] = None,
dtype_backend: DtypeBackend = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a csv asset to the datasource.

add_excel_asset

Signature

add_excel_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd0610d6270> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd0610d6030> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd0610d5e20> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd0610d6db0> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd0610d6e70> = None,
sheet_name: typing.Optional[typing.Union[str,
int,
typing.List[typing.Union[int,
str]]]] = 0,
header: Union[int,
Sequence[int],
None] = 0,
index_col: Union[int,
str,
Sequence[int],
None] = None,
usecols: typing.Optional[typing.Union[int,
str,
typing.Sequence[int]]] = None,
dtype: typing.Optional[dict] = None,
true_values: Union[Iterable[str],
None] = None,
false_values: Union[Iterable[str],
None] = None,
skiprows: typing.Optional[typing.Union[typing.Sequence[int],
int]] = None,
nrows: typing.Optional[int] = None,
na_values: typing.Any = None,
keep_default_na: bool = True,
na_filter: bool = True,
verbose: bool = False,
parse_dates: typing.Union[typing.List,
typing.Dict,
bool] = False,
date_format: typing.Optional[str] = None,
thousands: typing.Optional[str] = None,
decimal: str = '.',
comment: typing.Optional[str] = None,
skipfooter: int = 0,
storage_options: Union[StorageOptions,
None] = None,
dtype_backend: DtypeBackend = None,
engine_kwargs: typing.Optional[typing.Dict] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an excel asset to the datasource.

add_feather_asset

Signature

add_feather_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd060fd82c0> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd060fd8380> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd060fd84d0> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd060fd8680> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd060fd8740> = None,
columns: Union[Sequence[str],
None] = None,
use_threads: bool = True,
storage_options: Union[StorageOptions,
None] = None,
dtype_backend: DtypeBackend = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a feather asset to the datasource.

add_fwf_asset

Signature

add_fwf_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd060fd8e60> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd060fd8f20> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9070> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9220> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd060fd92e0> = None,
colspecs: Union[Sequence[Tuple[int,
int]],
str,
None] = 'infer',
widths: Union[Sequence[int],
None] = None,
infer_nrows: int = 100,
iterator: bool = False,
chunksize: typing.Optional[int] = None,
kwargs: typing.Optional[dict] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a fwf asset to the datasource.

add_hdf_asset

Signature

add_hdf_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9b50> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9c10> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9d60> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9f10> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd060fd9fd0> = None,
key: typing.Any = None,
mode: str = 'r',
errors: str = 'strict',
where: typing.Optional[typing.Union[str,
typing.List]] = None,
start: typing.Optional[int] = None,
stop: typing.Optional[int] = None,
columns: typing.Optional[typing.List[str]] = None,
iterator: bool = False,
chunksize: typing.Optional[int] = None,
kwargs: typing.Optional[dict] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an HDF asset to the datasource.

add_html_asset

Signature

add_html_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd060fda7b0> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd060fda870> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd060fda9c0> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd060fdab70> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd060fdac30> = None,
match: Union[str,
Pattern] = '.+',
header: Union[int,
Sequence[int],
None] = None,
index_col: Union[int,
Sequence[int],
None] = None,
skiprows: typing.Optional[typing.Union[typing.Sequence[int],
int]] = None,
attrs: typing.Optional[typing.Dict[str,
str]] = None,
parse_dates: bool = False,
thousands: typing.Optional[str] = ',',
encoding: typing.Optional[str] = None,
decimal: str = '.',
converters: typing.Optional[typing.Dict] = None,
na_values: Union[Iterable[object],
None] = None,
keep_default_na: bool = True,
displayed_only: bool = True,
dtype_backend: DtypeBackend = None,
storage_options: StorageOptions = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an HTML asset to the datasource.

add_iceberg_asset

Signature

add_iceberg_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd060fdb860> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd060fdb920> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd060fdba70> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd060fdbc20> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd060fdbce0> = None,
catalog_name: str | None = None,
catalog_properties: dict[str,
typing.Any] | None = None,
columns: list[str] | None = None,
row_filter: str | None = None,
case_sensitive: bool = True,
snapshot_id: int | None = None,
limit: int | None = None,
scan_properties: dict[str,
typing.Any] | None = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an iceberg asset to the datasource.

add_json_asset

Signature

add_json_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd061010470> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd061010530> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd061010680> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd061010830> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd0610108f0> = None,
orient: typing.Optional[str] = None,
typ: Literal['frame',
'series'] = 'frame',
dtype: typing.Optional[dict] = None,
convert_axes: typing.Optional[bool] = None,
convert_dates: typing.Union[bool,
typing.List[str]] = True,
keep_default_dates: bool = True,
precise_float: bool = False,
date_unit: typing.Optional[str] = None,
encoding: typing.Optional[str] = None,
encoding_errors: typing.Optional[str] = 'strict',
lines: bool = False,
chunksize: typing.Optional[int] = None,
compression: CompressionOptions = 'infer',
nrows: typing.Optional[int] = None,
storage_options: Union[StorageOptions,
None] = None,
dtype_backend: DtypeBackend = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a json asset to the datasource.

add_orc_asset

Signature

add_orc_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd061011400> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd0610114c0> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd061011610> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd0610117c0> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd061011880> = None,
columns: typing.Optional[typing.List[str]] = None,
dtype_backend: DtypeBackend = None,
kwargs: typing.Optional[dict] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an orc asset to the datasource.

add_parquet_asset

Signature

add_parquet_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd061011fd0> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd061012090> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd0610121e0> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd061012390> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd061012450> = None,
engine: str = 'auto',
columns: typing.Optional[typing.List[str]] = None,
storage_options: Union[StorageOptions,
None] = None,
dtype_backend: DtypeBackend = None,
to_pandas_kwargs: typing.Optional[typing.Dict] = None,
kwargs: typing.Optional[dict] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a parquet asset to the datasource.

add_pickle_asset

Signature

add_pickle_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd061012bd0> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd061012c90> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd061012de0> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd061012f90> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd061013050> = None,
compression: CompressionOptions = 'infer',
storage_options: Union[StorageOptions,
None] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a pickle asset to the datasource.

add_sas_asset

Signature

add_sas_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd061013740> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd061013800> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd061013950> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd061013b00> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd061013bc0> = None,
format: typing.Optional[str] = None,
index: typing.Optional[str] = None,
encoding: typing.Optional[str] = None,
chunksize: typing.Optional[int] = None,
iterator: bool = False,
compression: CompressionOptions = 'infer',
**extra_data: typing.Any
) → pydantic.BaseModel

Add a sas asset to the datasource.

add_spss_asset

Signature

add_spss_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd06103c3b0> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd06103c470> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd06103c5c0> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd06103c770> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd06103c830> = None,
usecols: typing.Optional[typing.Union[int,
str,
typing.Sequence[int]]] = None,
convert_categoricals: bool = True,
dtype_backend: DtypeBackend = None,
kwargs: typing.Optional[dict] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an SPSS asset to the datasource.

add_stata_asset

Signature

add_stata_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd06103d040> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd06103d100> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd06103d250> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd06103d400> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd06103d4c0> = None,
convert_dates: bool = True,
convert_categoricals: bool = True,
index_col: typing.Optional[str] = None,
convert_missing: bool = False,
preserve_dtypes: bool = True,
columns: Union[Sequence[str],
None] = None,
order_categoricals: bool = True,
chunksize: typing.Optional[int] = None,
iterator: bool = False,
compression: CompressionOptions = 'infer',
storage_options: Union[StorageOptions,
None] = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add a stata asset to the datasource.

add_xml_asset

Signature

add_xml_asset(
name: str,
*,
id: <pydantic.v1.fields.DeferredType object at 0x7fd06103ddc0> = None,
order_by: <pydantic.v1.fields.DeferredType object at 0x7fd06103de80> = None,
batch_metadata: <pydantic.v1.fields.DeferredType object at 0x7fd06103dfd0> = None,
batch_definitions: <pydantic.v1.fields.DeferredType object at 0x7fd06103e180> = None,
connect_options: <pydantic.v1.fields.DeferredType object at 0x7fd06103e240> = None,
xpath: str = './*',
namespaces: typing.Optional[typing.Dict[str,
str]] = None,
elems_only: bool = False,
attrs_only: bool = False,
names: Union[Sequence[str],
None] = None,
dtype: typing.Optional[dict] = None,
encoding: typing.Optional[str] = 'utf-8',
stylesheet: Union[FilePath,
None] = None,
iterparse: typing.Optional[typing.Dict[str,
typing.List[str]]] = None,
compression: CompressionOptions = 'infer',
storage_options: Union[StorageOptions,
None] = None,
dtype_backend: DtypeBackend = None,
**extra_data: typing.Any
) → pydantic.BaseModel

Add an XML asset to the datasource.

delete_asset

Signature

delete_asset(
name: str
) → None

Removes the DataAsset referred to by asset_name from internal list of available DataAsset objects.

Parameters

NameDescription

name

name of DataAsset to be deleted.

get_asset

Signature

get_asset(
name: str
) → great_expectations.datasource.fluent.interfaces._DataAssetT

Returns the DataAsset referred to by asset_name

Parameters

NameDescription

name

name of DataAsset sought.

Returns

TypeDescription

great_expectations.datasource.fluent.interfaces._DataAssetT

the named "DataAsset" object, if it exists; otherwise, an exception is raised.