| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | """Shared helper functions for connecting BigQuery and pyarrow.""" |
| |
|
| | from typing import Any |
| |
|
| | from packaging import version |
| |
|
# pyarrow is an optional dependency: fall back to ``None`` when it is not
# installed so this module can still be imported (the lookup tables defined
# below simply remain empty in that case).
try:
    import pyarrow
except ImportError:
    pyarrow = None
| |
|
| |
|
def pyarrow_datetime():
    """Return the Arrow type used for BigQuery DATETIME values.

    A microsecond-precision timestamp with no timezone attached.
    """
    return pyarrow.timestamp(unit="us", tz=None)
| |
|
| |
|
def pyarrow_numeric():
    """Return the Arrow type used for BigQuery NUMERIC values.

    A 128-bit decimal with precision 38 and scale 9.
    """
    return pyarrow.decimal128(precision=38, scale=9)
| |
|
| |
|
def pyarrow_bignumeric():
    """Return the Arrow type used for BigQuery BIGNUMERIC values.

    A 256-bit decimal with precision 76 and scale 38; requires
    pyarrow >= 3.0 (enforced by the module-level version gate).
    """
    return pyarrow.decimal256(precision=76, scale=38)
| |
|
| |
|
def pyarrow_time():
    """Return the Arrow type used for BigQuery TIME values.

    A 64-bit time-of-day with microsecond precision.
    """
    return pyarrow.time64(unit="us")
| |
|
| |
|
def pyarrow_timestamp():
    """Return the Arrow type used for BigQuery TIMESTAMP values.

    A microsecond-precision timestamp pinned to the UTC timezone.
    """
    utc_zone = "UTC"
    return pyarrow.timestamp("us", tz=utc_zone)
| |
|
| |
|
# Lookup tables translating between BigQuery scalar type names and pyarrow
# types. They default to empty so callers can still import this module (and
# get `None` lookups) when pyarrow is not installed.
_BQ_TO_ARROW_SCALARS = {}
_ARROW_SCALAR_IDS_TO_BQ = {}

if pyarrow:
    # BigQuery SQL type name -> zero-argument factory returning the matching
    # pyarrow DataType. Legacy and standard-SQL aliases (e.g. "BOOLEAN" /
    # "BOOL") both appear as keys.
    _BQ_TO_ARROW_SCALARS = {
        "BOOL": pyarrow.bool_,
        "BOOLEAN": pyarrow.bool_,
        "BYTES": pyarrow.binary,
        "DATE": pyarrow.date32,
        "DATETIME": pyarrow_datetime,
        "FLOAT": pyarrow.float64,
        "FLOAT64": pyarrow.float64,
        "GEOGRAPHY": pyarrow.string,
        "INT64": pyarrow.int64,
        "INTEGER": pyarrow.int64,
        "NUMERIC": pyarrow_numeric,
        "STRING": pyarrow.string,
        "TIME": pyarrow_time,
        "TIMESTAMP": pyarrow_timestamp,
    }

    # pyarrow DataType.id -> BigQuery SQL type name. Note this keys on the
    # *type id*, so many concrete arrow types collapse onto one BigQuery
    # type: every integer width maps to INT64, every float width to FLOAT64,
    # and unit variants (e.g. time32("ms") vs time32("s")) share one id.
    _ARROW_SCALAR_IDS_TO_BQ = {
        pyarrow.bool_().id: "BOOL",
        pyarrow.int8().id: "INT64",
        pyarrow.int16().id: "INT64",
        pyarrow.int32().id: "INT64",
        pyarrow.int64().id: "INT64",
        pyarrow.uint8().id: "INT64",
        pyarrow.uint16().id: "INT64",
        pyarrow.uint32().id: "INT64",
        pyarrow.uint64().id: "INT64",
        pyarrow.float16().id: "FLOAT64",
        pyarrow.float32().id: "FLOAT64",
        pyarrow.float64().id: "FLOAT64",
        pyarrow.time32("ms").id: "TIME",
        pyarrow.time64("ns").id: "TIME",
        pyarrow.timestamp("ns").id: "TIMESTAMP",
        pyarrow.date32().id: "DATE",
        pyarrow.date64().id: "DATETIME",
        pyarrow.binary().id: "BYTES",
        pyarrow.string().id: "STRING",
        pyarrow.large_string().id: "STRING",
        # The exact precision/scale chosen here are irrelevant: only the
        # decimal128 type id is used as the key.
        pyarrow.decimal128(38, scale=9).id: "NUMERIC",
    }

    # decimal256 (the Arrow type backing BIGNUMERIC) only exists in
    # pyarrow >= 3.0, so register the BIGNUMERIC mappings conditionally.
    if version.parse(pyarrow.__version__) >= version.parse("3.0.0"):
        _BQ_TO_ARROW_SCALARS["BIGNUMERIC"] = pyarrow_bignumeric
        # As above, only the decimal256 type id matters for this key.
        _ARROW_SCALAR_IDS_TO_BQ[pyarrow.decimal256(76, scale=38).id] = "BIGNUMERIC"
| |
|
| |
|
def bq_to_arrow_scalars(bq_scalar: str):
    """Look up the Arrow type factory for a BigQuery scalar type name.

    Returns:
        The Arrow scalar type that the input BigQuery scalar type maps to,
        or ``None`` when the BigQuery scalar type is unknown.
    """
    try:
        return _BQ_TO_ARROW_SCALARS[bq_scalar]
    except KeyError:
        return None
| |
|
| |
|
def arrow_scalar_ids_to_bq(arrow_scalar: Any):
    """Look up the BigQuery type name for an Arrow scalar type id.

    Returns:
        The BigQuery scalar type that the input arrow scalar type maps to,
        or ``None`` when the arrow scalar type is unknown.
    """
    try:
        return _ARROW_SCALAR_IDS_TO_BQ[arrow_scalar]
    except KeyError:
        return None
| |
|