Version: 1.3.13

PandasGoogleCloudStorageDatasource

Signature

class great_expectations.datasource.fluent.PandasGoogleCloudStorageDatasource(
    *,
    type: Literal['pandas_gcs'] = 'pandas_gcs',
    name: str,
    id: Optional[uuid.UUID] = None,
    assets: List[great_expectations.datasource.fluent.data_asset.path.file_asset.FileDataAsset] = [],
    bucket_or_name: str,
    gcs_options: Dict[str, Union[great_expectations.datasource.fluent.config_str.ConfigStr, Any]] = {}
)

PandasGoogleCloudStorageDatasource is a PandasDatasource that uses Google Cloud Storage as a data store.
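Example

A minimal sketch of creating this datasource through a Data Context, assuming GX 1.x and placeholder bucket and datasource names:

import great_expectations as gx

context = gx.get_context()

# Register a pandas datasource backed by a GCS bucket. The bucket name is a
# placeholder; gcs_options is forwarded to the google.cloud.storage client,
# and an empty dict typically falls back to Application Default Credentials.
datasource = context.data_sources.add_pandas_gcs(
    name="gcs_datasource",
    bucket_or_name="my-gcs-bucket",
    gcs_options={},
)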

Methods

add_csv_asset

Signature

add_csv_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    sep: typing.Optional[str] = None,
    delimiter: typing.Optional[str] = None,
    header: Union[int, Sequence[int], None, Literal['infer']] = 'infer',
    names: Union[Sequence[str], None] = None,
    index_col: Union[IndexLabel, Literal[False], None] = None,
    usecols: typing.Optional[typing.Union[int, str, typing.Sequence[int]]] = None,
    dtype: typing.Optional[dict] = None,
    engine: Union[CSVEngine, None] = None,
    true_values: typing.Optional[typing.List] = None,
    false_values: typing.Optional[typing.List] = None,
    skipinitialspace: bool = False,
    skiprows: typing.Optional[typing.Union[typing.Sequence[int], int]] = None,
    skipfooter: int = 0,
    nrows: typing.Optional[int] = None,
    na_values: Union[Sequence[str], None] = None,
    keep_default_na: bool = True,
    na_filter: bool = True,
    verbose: bool = False,
    skip_blank_lines: bool = True,
    parse_dates: Union[bool, Sequence[str], None] = None,
    infer_datetime_format: bool = None,
    keep_date_col: bool = False,
    date_format: typing.Optional[str] = None,
    dayfirst: bool = False,
    cache_dates: bool = True,
    iterator: bool = False,
    chunksize: typing.Optional[int] = None,
    compression: CompressionOptions = 'infer',
    thousands: typing.Optional[str] = None,
    decimal: str = '.',
    lineterminator: typing.Optional[str] = None,
    quotechar: str = '"',
    quoting: int = 0,
    doublequote: bool = True,
    escapechar: typing.Optional[str] = None,
    comment: typing.Optional[str] = None,
    encoding: typing.Optional[str] = None,
    encoding_errors: typing.Optional[str] = 'strict',
    dialect: typing.Optional[str] = None,
    on_bad_lines: str = 'error',
    delim_whitespace: bool = False,
    low_memory: bool = True,
    memory_map: bool = False,
    float_precision: Union[Literal['high', 'legacy'], None] = None,
    storage_options: Union[StorageOptions, None] = None,
    dtype_backend: DtypeBackend = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a CSV asset to the datasource.
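Example

A minimal sketch, assuming the datasource created above and placeholder object paths; the connect_options key ("gcs_prefix") and the batch-definition call are illustrative of the 1.x API rather than exhaustive:

csv_asset = datasource.add_csv_asset(
    name="taxi_csv",
    sep=",",
    header="infer",
    # Restrict the asset to objects under a prefix in the bucket (illustrative key).
    connect_options={"gcs_prefix": "data/taxi/"},
)

# A batch definition is still needed before requesting batches, e.g. one file per batch.
batch_definition = csv_asset.add_batch_definition_path(
    name="january", path="data/taxi/yellow_tripdata_2019-01.csv"
)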

add_excel_asset

Signature

add_excel_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    sheet_name: typing.Optional[typing.Union[str, int, typing.List[typing.Union[int, str]]]] = 0,
    header: Union[int, Sequence[int], None] = 0,
    names: typing.Optional[typing.List[str]] = None,
    index_col: Union[int, Sequence[int], None] = None,
    usecols: typing.Optional[typing.Union[int, str, typing.Sequence[int]]] = None,
    dtype: typing.Optional[dict] = None,
    engine: Union[Literal['xlrd', 'openpyxl', 'odf', 'pyxlsb'], None] = None,
    true_values: Union[Iterable[str], None] = None,
    false_values: Union[Iterable[str], None] = None,
    skiprows: typing.Optional[typing.Union[typing.Sequence[int], int]] = None,
    nrows: typing.Optional[int] = None,
    na_values: typing.Any = None,
    keep_default_na: bool = True,
    na_filter: bool = True,
    verbose: bool = False,
    parse_dates: typing.Union[typing.List, typing.Dict, bool] = False,
    date_format: typing.Optional[str] = None,
    thousands: typing.Optional[str] = None,
    decimal: str = '.',
    comment: typing.Optional[str] = None,
    skipfooter: int = 0,
    storage_options: Union[StorageOptions, None] = None,
    dtype_backend: DtypeBackend = None,
    engine_kwargs: typing.Optional[typing.Dict] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add an Excel asset to the datasource.
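Example

A minimal sketch with placeholder names, assuming the datasource from the earlier example:

excel_asset = datasource.add_excel_asset(
    name="sales_workbook",
    sheet_name=0,    # first sheet; a sheet name or a list of sheets also works
    header=0,
    skipfooter=2,    # drop a two-row footer, as an illustration of the pandas options
)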

add_feather_asset

Signature

add_feather_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    columns: Union[Sequence[str], None] = None,
    use_threads: bool = True,
    storage_options: Union[StorageOptions, None] = None,
    dtype_backend: DtypeBackend = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a Feather asset to the datasource.

add_fwf_asset

Signature

add_fwf_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    colspecs: Union[Sequence[Tuple[int, int]], str, None] = 'infer',
    widths: Union[Sequence[int], None] = None,
    infer_nrows: int = 100,
    dtype_backend: DtypeBackend = None,
    kwargs: typing.Optional[dict] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a fixed-width file (FWF) asset to the datasource.

add_hdf_asset

Signature

add_hdf_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    key: typing.Any = None,
    mode: str = 'r',
    errors: str = 'strict',
    where: typing.Optional[typing.Union[str, typing.List]] = None,
    start: typing.Optional[int] = None,
    stop: typing.Optional[int] = None,
    columns: typing.Optional[typing.List[str]] = None,
    iterator: bool = False,
    chunksize: typing.Optional[int] = None,
    kwargs: typing.Optional[dict] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add an HDF asset to the datasource.

add_html_asset

Signature

add_html_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    match: Union[str, Pattern] = '.+',
    flavor: typing.Optional[str] = None,
    header: Union[int, Sequence[int], None] = None,
    index_col: Union[int, Sequence[int], None] = None,
    skiprows: typing.Optional[typing.Union[typing.Sequence[int], int]] = None,
    attrs: typing.Optional[typing.Dict[str, str]] = None,
    parse_dates: bool = False,
    thousands: typing.Optional[str] = ',',
    encoding: typing.Optional[str] = None,
    decimal: str = '.',
    converters: typing.Optional[typing.Dict] = None,
    na_values: Union[Iterable[object], None] = None,
    keep_default_na: bool = True,
    displayed_only: bool = True,
    extract_links: Literal[None, 'header', 'footer', 'body', 'all'] = None,
    dtype_backend: DtypeBackend = None,
    storage_options: StorageOptions = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add an HTML asset to the datasource.

add_json_asset

Signature

add_json_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    orient: typing.Optional[str] = None,
    typ: Literal['frame', 'series'] = 'frame',
    dtype: typing.Optional[dict] = None,
    convert_axes: typing.Optional[bool] = None,
    convert_dates: typing.Union[bool, typing.List[str]] = True,
    keep_default_dates: bool = True,
    precise_float: bool = False,
    date_unit: typing.Optional[str] = None,
    encoding: typing.Optional[str] = None,
    encoding_errors: typing.Optional[str] = 'strict',
    lines: bool = False,
    chunksize: typing.Optional[int] = None,
    compression: CompressionOptions = 'infer',
    nrows: typing.Optional[int] = None,
    storage_options: Union[StorageOptions, None] = None,
    dtype_backend: DtypeBackend = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a JSON asset to the datasource.
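Example

A minimal sketch for newline-delimited JSON, with placeholder names:

json_asset = datasource.add_json_asset(
    name="events_jsonl",
    lines=True,           # one JSON record per line
    orient="records",
    compression="infer",  # e.g. .jsonl.gz objects are decompressed based on their extension
)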

add_orc_asset

Signature

add_orc_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    columns: typing.Optional[typing.List[str]] = None,
    dtype_backend: DtypeBackend = None,
    kwargs: typing.Optional[dict] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add an ORC asset to the datasource.

add_parquet_asset

Signature

add_parquet_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    engine: str = 'auto',
    columns: typing.Optional[typing.List[str]] = None,
    storage_options: Union[StorageOptions, None] = None,
    use_nullable_dtypes: bool = None,
    dtype_backend: DtypeBackend = None,
    kwargs: typing.Optional[dict] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a Parquet asset to the datasource.
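Example

A minimal sketch with placeholder asset and column names:

parquet_asset = datasource.add_parquet_asset(
    name="trips_parquet",
    engine="auto",                               # let pandas pick an available Parquet engine
    columns=["pickup_datetime", "fare_amount"],  # hypothetical column projection
)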

add_pickle_asset

Signature

add_pickle_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    compression: CompressionOptions = 'infer',
    storage_options: Union[StorageOptions, None] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a pickle asset to the datasource.

add_sas_asset

Signature

add_sas_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    format: typing.Optional[str] = None,
    index: typing.Optional[str] = None,
    encoding: typing.Optional[str] = None,
    chunksize: typing.Optional[int] = None,
    iterator: bool = False,
    compression: CompressionOptions = 'infer',
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a SAS asset to the datasource.

add_spss_asset

Signature

add_spss_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    usecols: typing.Optional[typing.Union[int, str, typing.Sequence[int]]] = None,
    convert_categoricals: bool = True,
    dtype_backend: DtypeBackend = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add an SPSS asset to the datasource.

add_stata_asset

Signature

add_stata_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    convert_dates: bool = True,
    convert_categoricals: bool = True,
    index_col: typing.Optional[str] = None,
    convert_missing: bool = False,
    preserve_dtypes: bool = True,
    columns: Union[Sequence[str], None] = None,
    order_categoricals: bool = True,
    chunksize: typing.Optional[int] = None,
    iterator: bool = False,
    compression: CompressionOptions = 'infer',
    storage_options: Union[StorageOptions, None] = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add a Stata asset to the datasource.

add_xml_asset

Signature

add_xml_asset(
    name: str,
    *,
    id: Optional[uuid.UUID] = None,
    order_by = None,
    batch_metadata = None,
    batch_definitions = None,
    connect_options = None,
    xpath: str = './*',
    namespaces: typing.Optional[typing.Dict[str, str]] = None,
    elems_only: bool = False,
    attrs_only: bool = False,
    names: Union[Sequence[str], None] = None,
    dtype: typing.Optional[dict] = None,
    encoding: typing.Optional[str] = 'utf-8',
    stylesheet: Union[FilePath, None] = None,
    iterparse: typing.Optional[typing.Dict[str, typing.List[str]]] = None,
    compression: CompressionOptions = 'infer',
    storage_options: Union[StorageOptions, None] = None,
    dtype_backend: DtypeBackend = None,
    **extra_data: typing.Any
) → pydantic.BaseModel

Add an XML asset to the datasource.

delete_asset

Signature

delete_asset(
    name: str
) → None

Removes the DataAsset referred to by name from the datasource's internal list of available DataAsset objects.

Parameters

Name    Description

name    The name of the DataAsset to be deleted.
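Example

A minimal sketch, assuming an asset named "taxi_csv" was added earlier:

datasource.delete_asset(name="taxi_csv")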

get_asset

Signature

get_asset(
name: str
) → great_expectations.datasource.fluent.interfaces._DataAssetT

Returns the DataAsset referred to by name.

Parameters

Name    Description

name    The name of the DataAsset sought.

Returns

Type    Description

great_expectations.datasource.fluent.interfaces._DataAssetT    The named DataAsset, if it exists; otherwise, an exception is raised.
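Example

A minimal sketch, assuming an asset named "taxi_csv" exists on the datasource:

csv_asset = datasource.get_asset(name="taxi_csv")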

