Skip to content

Data Download

PyFIA provides functions to download FIA data directly from the USDA Forest Service FIA DataMart.

Overview

from pyfia import download

# Download single state
db_path = download("GA")

# Download multiple states (merged)
db_path = download(["GA", "FL", "SC"])

# Download specific tables
db_path = download("GA", tables=["PLOT", "TREE", "COND"])

Main Function

download

download(states: str | list[str], dir: str | Path | None = None, common: bool = True, tables: list[str] | None = None, force: bool = False, show_progress: bool = True, use_cache: bool = True) -> Path

Download FIA data from the FIA DataMart.

This function downloads FIA data for one or more states from the USDA Forest Service FIA DataMart, similar to rFIA's getFIA() function. Data is automatically converted to DuckDB format for use with pyFIA.

PARAMETER DESCRIPTION
states

State abbreviations (e.g., 'GA', 'NC'). Supports multiple states: ['GA', 'FL', 'SC']

TYPE: str or list of str

dir

Directory to save downloaded data. Defaults to ~/.pyfia/data/

TYPE: str or Path DEFAULT: None

common

If True, download only tables required for pyFIA functions. If False, download all available tables.

TYPE: bool DEFAULT: True

tables

Specific tables to download. Overrides common parameter.

TYPE: list of str DEFAULT: None

force

If True, re-download even if files exist locally.

TYPE: bool DEFAULT: False

show_progress

Show download progress bars.

TYPE: bool DEFAULT: True

use_cache

Use cached downloads if available.

TYPE: bool DEFAULT: True

RETURNS DESCRIPTION
Path

Path to the DuckDB database file.

RAISES DESCRIPTION
StateNotFoundError

If an invalid state code is provided.

TableNotFoundError

If a requested table is not available.

NetworkError

If download fails due to network issues.

DownloadError

For other download-related errors.

Examples:

>>> from pyfia import download
>>>
>>> # Download Georgia data
>>> db_path = download("GA")
>>>
>>> # Download multiple states merged into one database
>>> db_path = download(["GA", "FL", "SC"])
>>>
>>> # Download only specific tables
>>> db_path = download("GA", tables=["PLOT", "TREE", "COND"])
>>>
>>> # Use with pyFIA immediately
>>> from pyfia import FIA, area
>>> with FIA(download("GA")) as db:
...     db.clip_most_recent()
...     result = area(db)
Notes
  • Large states (CA, TX) may have TREE tables >1GB compressed
  • First download may take several minutes depending on connection
  • Downloaded data is cached locally to avoid re-downloading
Source code in src/pyfia/downloader/__init__.py
def download(
    states: str | list[str],
    dir: str | Path | None = None,
    common: bool = True,
    tables: list[str] | None = None,
    force: bool = False,
    show_progress: bool = True,
    use_cache: bool = True,
) -> Path:
    """
    Fetch FIA data from the USDA FIA DataMart and return a DuckDB path.

    Mirrors rFIA's ``getFIA()``: the requested state tables are downloaded
    and converted into a single DuckDB database ready for use with pyFIA.

    Parameters
    ----------
    states : str or list of str
        One or more state abbreviations, e.g. ``'GA'`` or ``['GA', 'FL', 'SC']``.
        Multiple states are merged into one database.
    dir : str or Path, optional
        Target directory for downloaded data; ``~/.pyfia/data/`` when omitted.
    common : bool, default True
        Restrict the download to the tables pyFIA's estimators need.
        Pass False to fetch every available table.
    tables : list of str, optional
        Explicit table list; takes precedence over `common`.
    force : bool, default False
        Re-download even when local files already exist.
    show_progress : bool, default True
        Display progress bars while downloading.
    use_cache : bool, default True
        Reuse previously downloaded files when possible.

    Returns
    -------
    Path
        Location of the resulting DuckDB database file.

    Raises
    ------
    StateNotFoundError
        For an unrecognized state abbreviation.
    TableNotFoundError
        When a requested table does not exist for the state.
    NetworkError
        When the transfer fails for network reasons.
    DownloadError
        For any other download problem.

    Examples
    --------
    >>> from pyfia import download
    >>> db_path = download("GA")
    >>> db_path = download(["GA", "FL", "SC"])
    >>> db_path = download("GA", tables=["PLOT", "TREE", "COND"])
    >>> from pyfia import FIA, area
    >>> with FIA(download("GA")) as db:
    ...     db.clip_most_recent()
    ...     result = area(db)

    Notes
    -----
    - Large states (CA, TX) may have TREE tables >1GB compressed
    - First download may take several minutes depending on connection
    - Downloaded data is cached locally to avoid re-downloading
    """
    # Accept a bare string as a one-element list.
    state_list = [states] if isinstance(states, str) else states
    validated = [validate_state_code(code) for code in state_list]

    # Resolve and create the output directory.
    data_dir = _get_default_data_dir() if dir is None else Path(dir).expanduser()
    data_dir.mkdir(parents=True, exist_ok=True)

    client = DataMartClient()
    cache = DownloadCache(data_dir / ".cache")

    # Both backends share every keyword except the state argument itself.
    shared_kwargs = dict(
        data_dir=data_dir,
        client=client,
        cache=cache,
        common=common,
        tables=tables,
        force=force,
        show_progress=show_progress,
        use_cache=use_cache,
    )

    if len(validated) == 1:
        return _download_single_state(state=validated[0], **shared_kwargs)
    return _download_multi_state(states=validated, **shared_kwargs)

DataMart Client

DataMartClient

DataMartClient(timeout: int = 300, chunk_size: int = 1024 * 1024, max_retries: int = 3)

HTTP client for FIA DataMart downloads.

This client handles downloading CSV files from the FIA DataMart, with support for progress bars, retries, and checksum verification.

PARAMETER DESCRIPTION
timeout

Request timeout in seconds.

TYPE: int DEFAULT: 300

chunk_size

Download chunk size in bytes (default 1MB).

TYPE: int DEFAULT: 1048576

max_retries

Maximum number of retry attempts for failed downloads.

TYPE: int DEFAULT: 3

Examples:

>>> client = DataMartClient()
>>> path = client.download_table("GA", "PLOT", Path("./data"))
>>> print(f"Downloaded to: {path}")
Source code in src/pyfia/downloader/client.py
def __init__(
    self,
    timeout: int = 300,
    chunk_size: int = 1024 * 1024,
    max_retries: int = 3,
):
    """Configure timeouts/retries and build a reusable HTTP session."""
    self.timeout = timeout
    self.chunk_size = chunk_size
    self.max_retries = max_retries
    # One shared session so connections are pooled across downloads.
    session = requests.Session()
    session.headers.update({"User-Agent": "pyFIA/1.0 (download client)"})
    self.session = session

download_table

download_table(state: str, table: str, dest_dir: Path, show_progress: bool = True) -> Path

Download a single FIA table for a state.

PARAMETER DESCRIPTION
state

State abbreviation (e.g., 'GA') or 'REF' for reference tables.

TYPE: str

table

Table name (e.g., 'PLOT', 'TREE').

TYPE: str

dest_dir

Directory to save the extracted CSV file.

TYPE: Path

show_progress

Show download progress bar.

TYPE: bool DEFAULT: True

RETURNS DESCRIPTION
Path

Path to the extracted CSV file.

RAISES DESCRIPTION
StateNotFoundError

If the state code is invalid.

TableNotFoundError

If the table is not found for the state.

NetworkError

If the download fails.

Examples:

>>> client = DataMartClient()
>>> csv_path = client.download_table("GA", "PLOT", Path("./data"))
Source code in src/pyfia/downloader/client.py
def download_table(
    self,
    state: str,
    table: str,
    dest_dir: Path,
    show_progress: bool = True,
) -> Path:
    """
    Download a single FIA table for a state.

    Parameters
    ----------
    state : str
        State abbreviation (e.g., 'GA') or 'REF' for reference tables.
    table : str
        Table name (e.g., 'PLOT', 'TREE').
    dest_dir : Path
        Directory to save the extracted CSV file.
    show_progress : bool, default True
        Show download progress bar.

    Returns
    -------
    Path
        Path to the extracted CSV file.

    Raises
    ------
    StateNotFoundError
        If the state code is invalid.
    TableNotFoundError
        If the table is not found for the state.
    NetworkError
        If the download fails.

    Examples
    --------
    >>> client = DataMartClient()
    >>> csv_path = client.download_table("GA", "PLOT", Path("./data"))
    """
    state = validate_state_code(state)
    table = table.upper()

    url = self._build_csv_url(state, table)
    logger.info(f"Downloading {state}_{table} from {url}")

    # Download into a temp dir so partial files never land in dest_dir.
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_path = Path(temp_dir)
        # Reference tables are not prefixed with a state code on DataMart.
        zip_filename = f"{state}_{table}.zip" if state != "REF" else f"{table}.zip"
        zip_path = temp_path / zip_filename

        try:
            self._download_file(
                url,
                zip_path,
                description=f"{state}_{table}",
                show_progress=show_progress,
            )
        except TableNotFoundError as exc:
            # Re-raise with table/state context attached; chain explicitly
            # so the traceback keeps the original cause instead of the
            # misleading implicit "during handling of the above exception,
            # another exception occurred" message.
            raise TableNotFoundError(table, state) from exc

        # Extract CSV from the downloaded archive.
        extracted = self._extract_zip(zip_path, temp_path, show_progress=False)

        # Locate the CSV payload inside the archive.
        csv_files = [f for f in extracted if f.suffix.lower() == ".csv"]
        if not csv_files:
            raise DownloadError(f"No CSV file found in {zip_filename}", url=url)

        # Move the CSV out before the TemporaryDirectory is cleaned up.
        dest_dir.mkdir(parents=True, exist_ok=True)
        csv_file = csv_files[0]
        dest_path = dest_dir / csv_file.name

        shutil.move(str(csv_file), str(dest_path))

        logger.info(f"Extracted {table} to {dest_path}")
        return dest_path

download_tables

download_tables(state: str, tables: list[str] | None = None, common: bool = True, dest_dir: Path | None = None, show_progress: bool = True) -> dict[str, Path]

Download multiple FIA tables for a state.

PARAMETER DESCRIPTION
state

State abbreviation (e.g., 'GA') or 'REF' for reference tables.

TYPE: str

tables

Specific tables to download. If None, uses common or all tables.

TYPE: list of str DEFAULT: None

common

If tables is None, download only common tables (True) or all (False).

TYPE: bool DEFAULT: True

dest_dir

Directory to save files. Defaults to ~/.pyfia/data/{state}/csv/

TYPE: Path DEFAULT: None

show_progress

Show download progress.

TYPE: bool DEFAULT: True

RETURNS DESCRIPTION
dict

Mapping of table names to downloaded file paths.

Examples:

>>> client = DataMartClient()
>>> paths = client.download_tables("GA", common=True)
>>> print(f"Downloaded {len(paths)} tables")
Source code in src/pyfia/downloader/client.py
def download_tables(
    self,
    state: str,
    tables: list[str] | None = None,
    common: bool = True,
    dest_dir: Path | None = None,
    show_progress: bool = True,
) -> dict[str, Path]:
    """
    Download multiple FIA tables for a state.

    Parameters
    ----------
    state : str
        State abbreviation (e.g., 'GA') or 'REF' for reference tables.
    tables : list of str, optional
        Specific tables to download. If None, uses common or all tables.
    common : bool, default True
        If tables is None, download only common tables (True) or all (False).
    dest_dir : Path, optional
        Directory to save files. Defaults to ~/.pyfia/data/{state}/csv/
    show_progress : bool, default True
        Show download progress.

    Returns
    -------
    dict
        Mapping of table names to downloaded file paths.
        Tables that fail with TableNotFoundError/NetworkError are logged
        and omitted rather than aborting the whole batch.

    Examples
    --------
    >>> client = DataMartClient()
    >>> paths = client.download_tables("GA", common=True)
    >>> print(f"Downloaded {len(paths)} tables")
    """
    state = validate_state_code(state)

    # Determine tables to download.
    if state == "REF":
        tables_to_download = tables or REFERENCE_TABLES
    else:
        tables_to_download = get_tables_for_download(common=common, tables=tables)

    # Set default destination.
    if dest_dir is None:
        from pyfia.core.settings import settings

        dest_dir = settings.cache_dir.parent / "data" / state.lower() / "csv"

    dest_dir.mkdir(parents=True, exist_ok=True)

    downloaded = {}
    failed = []

    # Create the rich Console once up front instead of re-importing and
    # re-instantiating it on every loop iteration and again for the summary
    # (the original built a fresh Console per table).
    console = None
    if show_progress:
        from rich.console import Console

        console = Console()
        console.print(
            f"\n[bold]Downloading {len(tables_to_download)} tables for {state}[/bold]\n"
        )

    for i, table in enumerate(tables_to_download, 1):
        if show_progress:
            console.print(f"[{i}/{len(tables_to_download)}] {table}...", end=" ")

        try:
            path = self.download_table(
                state, table, dest_dir, show_progress=show_progress
            )
            downloaded[table] = path
            if show_progress:
                console.print("[green]OK[/green]")
        except (TableNotFoundError, NetworkError) as e:
            # Best-effort batch: record the failure and keep going.
            failed.append((table, str(e)))
            if show_progress:
                console.print(f"[red]FAILED[/red] ({e})")
            logger.warning(f"Failed to download {state}_{table}: {e}")

    if show_progress:
        console.print(
            f"\n[bold green]Downloaded {len(downloaded)}/{len(tables_to_download)} tables[/bold green]"
        )
        if failed:
            console.print(
                f"[yellow]Failed: {', '.join(t for t, _ in failed)}[/yellow]"
            )

    return downloaded

check_url_exists

check_url_exists(url: str) -> bool

Check if a URL exists (HEAD request).

PARAMETER DESCRIPTION
url

URL to check.

TYPE: str

RETURNS DESCRIPTION
bool

True if URL exists (status 200), False otherwise.

Source code in src/pyfia/downloader/client.py
def check_url_exists(self, url: str) -> bool:
    """
    Probe a URL with a HEAD request.

    Parameters
    ----------
    url : str
        URL to check.

    Returns
    -------
    bool
        True when the server answers 200 (after redirects), False on any
        other status or on a request failure.
    """
    try:
        head_response = self.session.head(url, timeout=30, allow_redirects=True)
    except requests.exceptions.RequestException:
        # Treat connection/timeout problems as "does not exist".
        return False
    return head_response.status_code == 200

Download Cache

DownloadCache

DownloadCache(cache_dir: Path)

Manages cached FIA data downloads with metadata tracking.

The cache stores metadata about downloaded DuckDB files including timestamps, checksums, and file locations. This allows skipping downloads for files that are already present and valid.

PARAMETER DESCRIPTION
cache_dir

Directory for cache storage.

TYPE: Path

Examples:

>>> cache = DownloadCache(Path("~/.pyfia/cache"))
>>> if not cache.get_cached("GA"):
...     # Download the file
...     cache.add_to_cache("GA", downloaded_path)
Source code in src/pyfia/downloader/cache.py
def __init__(self, cache_dir: Path):
    """Create/ensure the cache directory and load any existing metadata."""
    directory = Path(cache_dir).expanduser()
    directory.mkdir(parents=True, exist_ok=True)
    self.cache_dir = directory
    self.metadata_file = directory / self.METADATA_FILE
    # key -> CachedDownload record, populated from the metadata file.
    self._metadata: dict[str, CachedDownload] = {}
    self._load_metadata()

get_cached

get_cached(state: str, max_age_days: float | None = None) -> Path | None

Get the path to a cached DuckDB file if it exists and is valid.

PARAMETER DESCRIPTION
state

State abbreviation or cache key (e.g., 'GA', 'MERGED_FL_GA_SC').

TYPE: str

max_age_days

Maximum age in days to consider cache valid. Defaults to None (no age limit).

TYPE: float DEFAULT: None

RETURNS DESCRIPTION
Path or None

Path to the cached DuckDB file, or None if not found/invalid.

Source code in src/pyfia/downloader/cache.py
def get_cached(self, state: str, max_age_days: float | None = None) -> Path | None:
    """
    Get the path to a cached DuckDB file if it exists and is valid.

    Parameters
    ----------
    state : str
        State abbreviation or cache key (e.g., 'GA', 'MERGED_FL_GA_SC').
    max_age_days : float, optional
        Maximum age in days to consider cache valid.
        Defaults to None (no age limit).

    Returns
    -------
    Path or None
        Path to the cached DuckDB file, or None if not found/invalid.
    """
    key = self._get_cache_key(state)

    if key not in self._metadata:
        return None

    cached = self._metadata[key]
    path = Path(cached.path)

    # Check file exists
    if not path.exists():
        logger.debug(f"Cached file no longer exists: {path}")
        del self._metadata[key]
        self._save_metadata()
        return None

    # Check age if specified
    if max_age_days is not None and cached.age_days > max_age_days:
        logger.debug(f"Cached file too old: {cached.age_days:.1f} days")
        return None

    return path

add_to_cache

add_to_cache(state: str, path: Path, checksum: str | None = None) -> None

Add a downloaded DuckDB file to the cache.

PARAMETER DESCRIPTION
state

State abbreviation or cache key (e.g., 'GA', 'MERGED_FL_GA_SC').

TYPE: str

path

Path to the downloaded DuckDB file.

TYPE: Path

checksum

MD5 checksum of the file. Calculated if not provided.

TYPE: str DEFAULT: None

Source code in src/pyfia/downloader/cache.py
def add_to_cache(
    self,
    state: str,
    path: Path,
    checksum: str | None = None,
) -> None:
    """
    Register a downloaded DuckDB file in the cache metadata.

    Parameters
    ----------
    state : str
        State abbreviation or cache key (e.g., 'GA', 'MERGED_FL_GA_SC').
    path : Path
        Path to the downloaded DuckDB file; must exist on disk.
    checksum : str, optional
        MD5 checksum of the file. Computed from the file when omitted.

    Raises
    ------
    FileNotFoundError
        If `path` does not point to an existing file.
    """
    key = self._get_cache_key(state)
    path = Path(path)

    if not path.exists():
        raise FileNotFoundError(f"Cannot cache non-existent file: {path}")

    if checksum is None:
        import hashlib

        # Stream the file in chunks so large databases don't load into RAM.
        digest = hashlib.md5()
        with open(path, "rb") as fh:
            while chunk := fh.read(8192):
                digest.update(chunk)
        checksum = digest.hexdigest()

    record = CachedDownload(
        state=state.upper(),
        path=str(path.absolute()),
        downloaded_at=datetime.now().isoformat(),
        size_bytes=path.stat().st_size,
        checksum=checksum,
    )

    self._metadata[key] = record
    self._save_metadata()
    logger.debug(f"Added to cache: {key} -> {path}")

clear_cache

clear_cache(older_than: timedelta | None = None, state: str | None = None, delete_files: bool = False) -> int

Clear cached entries.

PARAMETER DESCRIPTION
older_than

Only clear entries older than this. If None, clear all.

TYPE: timedelta DEFAULT: None

state

Only clear entries for this state.

TYPE: str DEFAULT: None

delete_files

If True, also delete the cached files from disk.

TYPE: bool DEFAULT: False

RETURNS DESCRIPTION
int

Number of entries cleared.

Source code in src/pyfia/downloader/cache.py
def clear_cache(
    self,
    older_than: timedelta | None = None,
    state: str | None = None,
    delete_files: bool = False,
) -> int:
    """
    Clear cached entries.

    Parameters
    ----------
    older_than : timedelta, optional
        Only clear entries older than this. If None, clear all.
    state : str, optional
        Only clear entries for this state.
    delete_files : bool, default False
        If True, also delete the cached files from disk.

    Returns
    -------
    int
        Number of entries cleared.
    """
    to_remove = []
    cutoff_days = older_than.total_seconds() / 86400 if older_than else None

    for key, cached in self._metadata.items():
        # Filter by state
        if state and cached.state != state.upper():
            continue

        # Filter by age
        if cutoff_days and cached.age_days <= cutoff_days:
            continue

        to_remove.append(key)

        # Delete file if requested
        if delete_files:
            path = Path(cached.path)
            if path.exists():
                try:
                    path.unlink()
                    logger.debug(f"Deleted cached file: {path}")
                except OSError as e:
                    logger.warning(f"Failed to delete {path}: {e}")

    # Remove from metadata
    for key in to_remove:
        del self._metadata[key]

    if to_remove:
        self._save_metadata()

    return len(to_remove)

get_cache_info

get_cache_info() -> dict

Get information about the cache.

RETURNS DESCRIPTION
dict

Cache statistics including total size, file count, etc.

Source code in src/pyfia/downloader/cache.py
def get_cache_info(self) -> dict:
    """
    Summarize the cache contents.

    Returns
    -------
    dict
        Statistics: cache directory, entry/file counts, total size of
        files still present on disk (MB), sorted state list, and the
        number of stale entries.
    """
    size_bytes_total = 0
    present_files = 0
    seen_states = set()
    stale_entries = 0

    for record in self._metadata.values():
        # Only count size for files that still exist on disk.
        if Path(record.path).exists():
            present_files += 1
            size_bytes_total += record.size_bytes
        seen_states.add(record.state)
        if record.is_stale:
            stale_entries += 1

    return {
        "cache_dir": str(self.cache_dir),
        "total_entries": len(self._metadata),
        "valid_files": present_files,
        "total_size_mb": size_bytes_total / (1024 * 1024),
        "states": sorted(seen_states),
        "stale_entries": stale_entries,
    }

list_cached

list_cached(state: str | None = None) -> list[CachedDownload]

List cached downloads.

PARAMETER DESCRIPTION
state

Filter by state.

TYPE: str DEFAULT: None

RETURNS DESCRIPTION
list of CachedDownload

List of cached download metadata.

Source code in src/pyfia/downloader/cache.py
def list_cached(self, state: str | None = None) -> list[CachedDownload]:
    """
    List cached downloads, optionally filtered by state.

    Parameters
    ----------
    state : str, optional
        Restrict results to this state (case-insensitive).

    Returns
    -------
    list of CachedDownload
        Cached download metadata, sorted by state code.
    """
    wanted = state.upper() if state else None
    matches = [
        record
        for record in self._metadata.values()
        if wanted is None or record.state == wanted
    ]
    return sorted(matches, key=lambda record: record.state)

Cache Management Functions

clear_cache

clear_cache(older_than_days: int | None = None, state: str | None = None, delete_files: bool = False) -> int

Clear the download cache.

PARAMETER DESCRIPTION
older_than_days

Only clear entries older than this many days.

TYPE: int DEFAULT: None

state

Only clear entries for this state.

TYPE: str DEFAULT: None

delete_files

If True, also delete the cached files from disk.

TYPE: bool DEFAULT: False

RETURNS DESCRIPTION
int

Number of cache entries cleared.

Source code in src/pyfia/downloader/__init__.py
def clear_cache(
    older_than_days: int | None = None,
    state: str | None = None,
    delete_files: bool = False,
) -> int:
    """
    Clear the download cache.

    Parameters
    ----------
    older_than_days : int, optional
        Only clear entries older than this many days. If None, clear all.
    state : str, optional
        Only clear entries for this state.
    delete_files : bool, default False
        If True, also delete the cached files from disk.

    Returns
    -------
    int
        Number of cache entries cleared.
    """
    from datetime import timedelta

    data_dir = _get_default_data_dir()
    cache = DownloadCache(data_dir / ".cache")

    # BUG FIX: use `is not None` — with a truthiness test, passing 0 was
    # silently treated as "no age filter" (clear everything) instead of
    # an explicit timedelta(0) cutoff.
    older_than = (
        timedelta(days=older_than_days) if older_than_days is not None else None
    )

    return cache.clear_cache(
        older_than=older_than, state=state, delete_files=delete_files
    )

cache_info

cache_info() -> dict

Get information about the download cache.

RETURNS DESCRIPTION
dict

Cache statistics including size, file count, etc.

Source code in src/pyfia/downloader/__init__.py
def cache_info() -> dict:
    """
    Get information about the download cache.

    Returns
    -------
    dict
        Cache statistics including size, file count, etc.
    """
    # The cache lives under a ".cache" subfolder of the default data dir.
    cache = DownloadCache(_get_default_data_dir() / ".cache")
    return cache.get_cache_info()

Table Definitions

COMMON_TABLES module-attribute

COMMON_TABLES: list[str] = ['COND', 'COND_DWM_CALC', 'INVASIVE_SUBPLOT_SPP', 'PLOT', 'POP_ESTN_UNIT', 'POP_EVAL', 'POP_EVAL_GRP', 'POP_EVAL_TYP', 'POP_PLOT_STRATUM_ASSGN', 'POP_STRATUM', 'SUBPLOT', 'TREE', 'TREE_GRM_COMPONENT', 'TREE_GRM_MIDPT', 'TREE_GRM_BEGIN', 'SUBP_COND_CHNG_MTRX', 'SEEDLING', 'SURVEY', 'SUBP_COND', 'P2VEG_SUBP_STRUCTURE']

ALL_TABLES module-attribute

ALL_TABLES: list[str] = ['BOUNDARY', 'COND', 'COND_DWM_CALC', 'COUNTY', 'DWM_COARSE_WOODY_DEBRIS', 'DWM_DUFF_LITTER_FUEL', 'DWM_FINE_WOODY_DEBRIS', 'DWM_MICROPLOT_FUEL', 'DWM_RESIDUAL_PILE', 'DWM_TRANSECT_SEGMENT', 'DWM_VISIT', 'GRND_CVR', 'INVASIVE_SUBPLOT_SPP', 'LICHEN_LAB', 'LICHEN_PLOT_SUMMARY', 'LICHEN_VISIT', 'OZONE_BIOSITE_SUMMARY', 'OZONE_PLOT', 'OZONE_PLOT_SUMMARY', 'OZONE_SPECIES_SUMMARY', 'OZONE_VALIDATION', 'OZONE_VISIT', 'P2VEG_SUBPLOT_SPP', 'P2VEG_SUBP_STRUCTURE', 'PLOT', 'PLOTGEOM', 'PLOTSNAP', 'POP_ESTN_UNIT', 'POP_EVAL', 'POP_EVAL_ATTRIBUTE', 'POP_EVAL_GRP', 'POP_EVAL_TYP', 'POP_PLOT_STRATUM_ASSGN', 'POP_STRATUM', 'SEEDLING', 'SITETREE', 'SOILS_EROSION', 'SOILS_LAB', 'SOILS_SAMPLE_LOC', 'SOILS_VISIT', 'SUBPLOT', 'SUBP_COND', 'SUBP_COND_CHNG_MTRX', 'SURVEY', 'TREE', 'TREE_GRM_BEGIN', 'TREE_GRM_COMPONENT', 'TREE_GRM_ESTN', 'TREE_GRM_MIDPT', 'TREE_REGIONAL_BIOMASS', 'TREE_WOODLAND_STEMS', 'VEG_PLOT_SPECIES', 'VEG_QUADRAT', 'VEG_SUBPLOT', 'VEG_SUBPLOT_SPP', 'VEG_VISIT']

VALID_STATE_CODES module-attribute

VALID_STATE_CODES: dict[str, str] = {'AL': 'Alabama', 'AK': 'Alaska', 'AZ': 'Arizona', 'AR': 'Arkansas', 'CA': 'California', 'CO': 'Colorado', 'CT': 'Connecticut', 'DE': 'Delaware', 'FL': 'Florida', 'GA': 'Georgia', 'HI': 'Hawaii', 'ID': 'Idaho', 'IL': 'Illinois', 'IN': 'Indiana', 'IA': 'Iowa', 'KS': 'Kansas', 'KY': 'Kentucky', 'LA': 'Louisiana', 'ME': 'Maine', 'MD': 'Maryland', 'MA': 'Massachusetts', 'MI': 'Michigan', 'MN': 'Minnesota', 'MS': 'Mississippi', 'MO': 'Missouri', 'MT': 'Montana', 'NE': 'Nebraska', 'NV': 'Nevada', 'NH': 'New Hampshire', 'NJ': 'New Jersey', 'NM': 'New Mexico', 'NY': 'New York', 'NC': 'North Carolina', 'ND': 'North Dakota', 'OH': 'Ohio', 'OK': 'Oklahoma', 'OR': 'Oregon', 'PA': 'Pennsylvania', 'RI': 'Rhode Island', 'SC': 'South Carolina', 'SD': 'South Dakota', 'TN': 'Tennessee', 'TX': 'Texas', 'UT': 'Utah', 'VT': 'Vermont', 'VA': 'Virginia', 'WA': 'Washington', 'WV': 'West Virginia', 'WI': 'Wisconsin', 'WY': 'Wyoming', 'AS': 'American Samoa', 'FM': 'Federated States of Micronesia', 'GU': 'Guam', 'MH': 'Marshall Islands', 'MP': 'Northern Mariana Islands', 'PW': 'Palau', 'PR': 'Puerto Rico', 'VI': 'Virgin Islands'}

Exceptions

DownloadError

DownloadError(message: str, url: str | None = None)

Bases: Exception

Base exception for all download-related errors.

PARAMETER DESCRIPTION
message

Human-readable error message.

TYPE: str

url

URL that caused the error.

TYPE: str DEFAULT: None

Source code in src/pyfia/downloader/exceptions.py
def __init__(self, message: str, url: str | None = None):
    """Record the message and offending URL, then init the base Exception."""
    self.message = message
    self.url = url
    super().__init__(message)

StateNotFoundError

StateNotFoundError(state: str, valid_states: list[str] | None = None)

Bases: DownloadError

Raised when an invalid state code is provided.

PARAMETER DESCRIPTION
state

The invalid state code.

TYPE: str

valid_states

List of valid state codes for reference.

TYPE: list of str DEFAULT: None

Source code in src/pyfia/downloader/exceptions.py
def __init__(self, state: str, valid_states: list[str] | None = None):
    """Build an error message for an invalid state, listing sample valid codes."""
    self.state = state
    self.valid_states = valid_states
    msg = f"Invalid state code: '{state}'"
    if valid_states:
        # Show only the first 10 codes to keep the message readable.
        preview = ", ".join(sorted(valid_states)[:10])
        msg = f"{msg}. Valid codes: {preview}..."
    super().__init__(msg)

TableNotFoundError

TableNotFoundError(table: str, state: str | None = None)

Bases: DownloadError

Raised when a requested table is not available for download.

PARAMETER DESCRIPTION
table

The table name that was not found.

TYPE: str

state

The state for which the table was requested.

TYPE: str DEFAULT: None

Source code in src/pyfia/downloader/exceptions.py
def __init__(self, table: str, state: str | None = None):
    """Build an error message for a missing table, optionally naming the state."""
    self.table = table
    self.state = state
    suffix = f" for state '{state}'" if state else ""
    super().__init__(f"Table '{table}' not found{suffix}")

NetworkError

NetworkError(message: str, url: str | None = None, status_code: int | None = None)

Bases: DownloadError

Raised when a network-related download failure occurs.

PARAMETER DESCRIPTION
message

Description of the network error.

TYPE: str

url

URL that caused the error.

TYPE: str DEFAULT: None

status_code

HTTP status code if available.

TYPE: int DEFAULT: None

Source code in src/pyfia/downloader/exceptions.py
def __init__(
    self, message: str, url: str | None = None, status_code: int | None = None
):
    """Delegate message/url to DownloadError and record the HTTP status code."""
    super().__init__(message, url)
    self.status_code = status_code